From 40144c806338f3c653568934f3151b32304d4c4c Mon Sep 17 00:00:00 2001 From: Laurent Mazuel Date: Tue, 1 Sep 2020 14:18:40 -0700 Subject: [PATCH 1/4] Synapse regenerated on 9/1 with autorest 5.2 preview --- .../azure/synapse/accesscontrol/__init__.py | 2 +- .../accesscontrol/_access_control_client.py | 1 - .../synapse/accesscontrol/_configuration.py | 4 +- .../synapse/accesscontrol/_metadata.json | 129 --- .../azure/synapse/accesscontrol/_version.py | 2 +- .../synapse/accesscontrol/aio/__init__.py | 2 +- ...ent_async.py => _access_control_client.py} | 5 +- ...nfiguration_async.py => _configuration.py} | 4 +- .../__init__.py | 2 +- .../_access_control_client_operations.py} | 21 +- .../_access_control_client_operations.py | 21 +- .../azure/synapse/artifacts/__init__.py | 2 +- .../azure/synapse/artifacts/_configuration.py | 4 +- .../azure/synapse/artifacts/_metadata.json | 63 -- .../azure/synapse/artifacts/_version.py | 2 +- .../azure/synapse/artifacts/aio/__init__.py | 2 +- ...s_client_async.py => _artifacts_client.py} | 46 +- ...nfiguration_async.py => _configuration.py} | 4 +- .../__init__.py | 22 +- .../_data_flow_debug_session_operations.py} | 18 +- .../_data_flow_operations.py} | 183 +++- .../_dataset_operations.py} | 183 +++- .../_linked_service_operations.py} | 183 +++- .../_notebook_operations.py} | 186 +++- .../_pipeline_operations.py} | 189 +++- .../_pipeline_run_operations.py} | 13 +- .../_spark_job_definition_operations.py} | 19 +- .../_sql_script_operations.py} | 12 +- .../_trigger_operations.py} | 192 +++- .../_trigger_run_operations.py} | 60 +- .../synapse/artifacts/models/__init__.py | 46 + .../models/_artifacts_client_enums.py | 628 +++++++------ .../azure/synapse/artifacts/models/_models.py | 842 ++++++++++------- .../synapse/artifacts/models/_models_py3.py | 886 +++++++++++------- .../_data_flow_debug_session_operations.py | 18 +- .../operations/_data_flow_operations.py | 185 +++- .../operations/_dataset_operations.py | 185 +++- .../operations/_linked_service_operations.py | 185 +++- .../operations/_notebook_operations.py | 188 +++- .../operations/_pipeline_operations.py | 191 +++- .../operations/_pipeline_run_operations.py | 13 +- .../_spark_job_definition_operations.py | 19 +- .../operations/_sql_script_operations.py | 12 +- .../operations/_trigger_operations.py | 194 +++- .../operations/_trigger_run_operations.py | 61 +- .../azure/synapse/spark/__init__.py | 2 +- .../azure/synapse/spark/_configuration.py | 4 +- .../azure/synapse/spark/_metadata.json | 78 -- .../azure/synapse/spark/_spark_client.py | 1 - .../azure/synapse/spark/_version.py | 2 +- .../azure/synapse/spark/aio/__init__.py | 2 +- ...nfiguration_async.py => _configuration.py} | 4 +- ...spark_client_async.py => _spark_client.py} | 11 +- .../__init__.py | 4 +- .../_spark_batch_operations.py} | 10 +- .../_spark_session_operations.py} | 23 +- .../azure/synapse/spark/models/_models.py | 4 +- .../azure/synapse/spark/models/_models_py3.py | 4 +- .../spark/models/_spark_client_enums.py | 90 +- .../operations/_spark_batch_operations.py | 10 +- .../operations/_spark_session_operations.py | 23 +- 61 files changed, 3661 insertions(+), 1840 deletions(-) delete mode 100644 sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_metadata.json rename sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/{_access_control_client_async.py => _access_control_client.py} (88%) rename sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/{_configuration_async.py => _configuration.py} 
(93%) rename sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/{operations_async => operations}/__init__.py (85%) rename sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/{operations_async/_access_control_client_operations_async.py => operations/_access_control_client_operations.py} (95%) delete mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_metadata.json rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{_artifacts_client_async.py => _artifacts_client.py} (76%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{_configuration_async.py => _configuration.py} (93%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async => operations}/__init__.py (52%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_data_flow_debug_session_operations_async.py => operations/_data_flow_debug_session_operations.py} (97%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_data_flow_operations_async.py => operations/_data_flow_operations.py} (65%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_dataset_operations_async.py => operations/_dataset_operations.py} (66%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_linked_service_operations_async.py => operations/_linked_service_operations.py} (66%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_notebook_operations_async.py => operations/_notebook_operations.py} (70%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_pipeline_operations_async.py => operations/_pipeline_operations.py} (71%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_pipeline_run_operations_async.py => operations/_pipeline_run_operations.py} (96%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_spark_job_definition_operations_async.py => operations/_spark_job_definition_operations.py} (97%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_sql_script_operations_async.py => operations/_sql_script_operations.py} (96%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_trigger_operations_async.py => operations/_trigger_operations.py} (83%) rename sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/{operations_async/_trigger_run_operations_async.py => operations/_trigger_run_operations.py} (71%) delete mode 100644 sdk/synapse/azure-synapse-spark/azure/synapse/spark/_metadata.json rename sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/{_configuration_async.py => _configuration.py} (94%) rename sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/{_spark_client_async.py => _spark_client.py} (84%) rename sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/{operations_async => operations}/__init__.py (80%) rename sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/{operations_async/_spark_batch_operations_async.py => operations/_spark_batch_operations.py} (97%) rename sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/{operations_async/_spark_session_operations_async.py => operations/_spark_session_operations.py} (96%) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py 
b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py index 37019086a1cd..aa7e87061bc7 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py @@ -13,7 +13,7 @@ __all__ = ['AccessControlClient'] try: - from ._patch import patch_sdk + from ._patch import patch_sdk # type: ignore patch_sdk() except ImportError: pass diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py index 1ffdfa416790..1e9b6a7168a1 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py @@ -29,7 +29,6 @@ class AccessControlClient(AccessControlClientOperationsMixin): :type credential: ~azure.core.credentials.TokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. :type endpoint: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py index e69c182dd453..14b01add185f 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py @@ -48,8 +48,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2020-02-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -62,6 +61,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_metadata.json b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_metadata.json deleted file mode 100644 index 2e94b68c65a2..000000000000 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_metadata.json +++ /dev/null @@ -1,129 +0,0 @@ -{ - "chosen_version": "2020-02-01-preview", - "total_api_version_list": ["2020-02-01-preview"], - "client": { - "name": "AccessControlClient", - "filename": "_access_control_client", - "description": "AccessControlClient." 
- }, - "global_parameters": { - "sync_method": { - "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "async_method": { - "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, endpoint" - }, - "config": { - "credential": true, - "credential_scopes": ["https://dev.azuresynapse.net/.default"] - }, - "operation_groups": { - }, - "operation_mixins": { - "get_role_definitions" : { - "sync": { - "signature": "def get_role_definitions(\n self,\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "def get_role_definitions(\n self,\n **kwargs\n) -\u003e AsyncItemPaged[\"models.RolesListResponse\"]:\n", - "coroutine": false - }, - "doc": " \"\"\"List roles.\n\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: RolesListResponse, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.RolesListResponse\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "" - }, - "get_role_definition_by_id" : { - "sync": { - "signature": "def get_role_definition_by_id(\n self,\n role_id, # type: str\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_role_definition_by_id(\n self,\n role_id: str,\n **kwargs\n) -\u003e \"models.SynapseRole\":\n", - "coroutine": true - }, - "doc": " \"\"\"Get role by role Id.\n\n:param role_id: Synapse Built-In Role Id.\n:type role_id: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: SynapseRole, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.SynapseRole\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_id" - }, - "create_role_assignment" : { - "sync": { - "signature": "def create_role_assignment(\n self,\n create_role_assignment_options, # type: \"models.RoleAssignmentOptions\"\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def create_role_assignment(\n self,\n create_role_assignment_options: \"models.RoleAssignmentOptions\",\n **kwargs\n) -\u003e \"models.RoleAssignmentDetails\":\n", - "coroutine": true - }, - "doc": " \"\"\"Create role assignment.\n\n:param create_role_assignment_options: Details of role id and object id.\n:type create_role_assignment_options: ~azure.synapse.accesscontrol.models.RoleAssignmentOptions\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: RoleAssignmentDetails, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.RoleAssignmentDetails\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "create_role_assignment_options" - }, - 
"get_role_assignments" : { - "sync": { - "signature": "def get_role_assignments(\n self,\n role_id=None, # type: Optional[str]\n principal_id=None, # type: Optional[str]\n continuation_token_parameter=None, # type: Optional[str]\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_role_assignments(\n self,\n role_id: Optional[str] = None,\n principal_id: Optional[str] = None,\n continuation_token_parameter: Optional[str] = None,\n **kwargs\n) -\u003e List[\"models.RoleAssignmentDetails\"]:\n", - "coroutine": true - }, - "doc": " \"\"\"List role assignments.\n\n:param role_id: Synapse Built-In Role Id.\n:type role_id: str\n:param principal_id: Object ID of the AAD principal or security-group.\n:type principal_id: str\n:param continuation_token_parameter: Continuation token.\n:type continuation_token_parameter: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: list of RoleAssignmentDetails, or the result of cls(response)\n:rtype: list[~azure.synapse.accesscontrol.models.RoleAssignmentDetails]\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_id, principal_id, continuation_token_parameter" - }, - "get_role_assignment_by_id" : { - "sync": { - "signature": "def get_role_assignment_by_id(\n self,\n role_assignment_id, # type: str\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_role_assignment_by_id(\n self,\n role_assignment_id: str,\n **kwargs\n) -\u003e \"models.RoleAssignmentDetails\":\n", - "coroutine": true - }, - "doc": " \"\"\"Get role assignment by role assignment Id.\n\n:param role_assignment_id: The ID of the role assignment.\n:type role_assignment_id: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: RoleAssignmentDetails, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.RoleAssignmentDetails\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_assignment_id" - }, - "delete_role_assignment_by_id" : { - "sync": { - "signature": "def delete_role_assignment_by_id(\n self,\n role_assignment_id, # type: str\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def delete_role_assignment_by_id(\n self,\n role_assignment_id: str,\n **kwargs\n) -\u003e None:\n", - "coroutine": true - }, - "doc": " \"\"\"Delete role assignment by role assignment Id.\n\n:param role_assignment_id: The ID of the role assignment.\n:type role_assignment_id: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: None, or the result of cls(response)\n:rtype: None\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_assignment_id" - }, - "get_caller_role_assignments" : { - "sync": { - "signature": "def get_caller_role_assignments(\n self,\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_caller_role_assignments(\n self,\n **kwargs\n) -\u003e List[str]:\n", - "coroutine": true - }, - "doc": " \"\"\"List role assignments of the caller.\n\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: list of str, or the result of cls(response)\n:rtype: list[str]\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "" - } - }, - "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.exceptions\": [\"HttpResponseError\", \"ResourceExistsError\", \"ResourceNotFoundError\", \"map_error\"], \"azure.core.pipeline\": 
[\"PipelineResponse\"], \"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"], \"azure.core.paging\": [\"ItemPaged\"]}, \"stdlib\": {\"warnings\": [null]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Callable\", \"Dict\", \"Generic\", \"Iterable\", \"List\", \"Optional\", \"TypeVar\"]}}}", - "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.exceptions\": [\"HttpResponseError\", \"ResourceExistsError\", \"ResourceNotFoundError\", \"map_error\"], \"azure.core.pipeline\": [\"PipelineResponse\"], \"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"], \"azure.core.async_paging\": [\"AsyncItemPaged\", \"AsyncList\"]}, \"stdlib\": {\"warnings\": [null]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"AsyncIterable\", \"Callable\", \"Dict\", \"Generic\", \"List\", \"Optional\", \"TypeVar\"]}}}" -} \ No newline at end of file diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py index 035146e99a22..eae7c95b6fbd 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.2.0" +VERSION = "0.1.0" diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py index 3cec25e9e712..8eafa989fcbc 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._access_control_client_async import AccessControlClient +from ._access_control_client import AccessControlClient __all__ = ['AccessControlClient'] diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client_async.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py similarity index 88% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client_async.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py index b25236edd254..e5a37cc4a523 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client_async.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py @@ -15,8 +15,8 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration_async import AccessControlClientConfiguration -from .operations_async import AccessControlClientOperationsMixin +from ._configuration import AccessControlClientConfiguration +from .operations import AccessControlClientOperationsMixin from .. 
import models @@ -27,7 +27,6 @@ class AccessControlClient(AccessControlClientOperationsMixin): :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. :type endpoint: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration_async.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py similarity index 93% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration_async.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py index 4c5da8936401..dd26c75218a7 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration_async.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py @@ -45,8 +45,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2020-02-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -58,6 +57,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/__init__.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/__init__.py similarity index 85% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/__init__.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/__init__.py index a13147e030fb..b4127a08ecec 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/__init__.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/__init__.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._access_control_client_operations_async import AccessControlClientOperationsMixin +from ._access_control_client_operations import AccessControlClientOperationsMixin __all__ = [ 'AccessControlClientOperationsMixin', diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/_access_control_client_operations_async.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py similarity index 95% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/_access_control_client_operations_async.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py index 11653dd0c5cd..bf972febb9e6 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/_access_control_client_operations_async.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py @@ -35,11 +35,12 @@ def get_role_definitions( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -106,6 +107,7 @@ async def get_role_definition_by_id( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_definition_by_id.metadata['url'] # type: ignore @@ -121,7 +123,7 @@ async def get_role_definition_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -159,6 +161,7 @@ async def create_role_assignment( error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_role_assignment.metadata['url'] # type: ignore @@ -174,13 +177,12 @@ async def create_role_assignment( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_role_assignment_options, 'RoleAssignmentOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -221,6 +223,7 @@ async def get_role_assignments( error_map = {404: ResourceNotFoundError, 409: 
ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignments.metadata['url'] # type: ignore @@ -241,7 +244,7 @@ async def get_role_assignments( header_parameters = {} # type: Dict[str, Any] if continuation_token_parameter is not None: header_parameters['x-ms-continuation'] = self._serialize.header("continuation_token_parameter", continuation_token_parameter, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -280,6 +283,7 @@ async def get_role_assignment_by_id( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignment_by_id.metadata['url'] # type: ignore @@ -295,7 +299,7 @@ async def get_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -332,6 +336,7 @@ async def delete_role_assignment_by_id( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.delete_role_assignment_by_id.metadata['url'] # type: ignore @@ -347,6 +352,7 @@ async def delete_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -377,6 +383,7 @@ async def get_caller_role_assignments( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_caller_role_assignments.metadata['url'] # type: ignore @@ -391,7 +398,7 @@ async def get_caller_role_assignments( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py index e27d6d28f4ff..cb69534a9872 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py @@ -40,11 +40,12 @@ def get_role_definitions( error_map = {404: ResourceNotFoundError, 409: 
ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -112,6 +113,7 @@ def get_role_definition_by_id( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_definition_by_id.metadata['url'] # type: ignore @@ -127,7 +129,7 @@ def get_role_definition_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -166,6 +168,7 @@ def create_role_assignment( error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_role_assignment.metadata['url'] # type: ignore @@ -181,13 +184,12 @@ def create_role_assignment( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_role_assignment_options, 'RoleAssignmentOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -229,6 +231,7 @@ def get_role_assignments( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignments.metadata['url'] # type: ignore @@ -249,7 +252,7 @@ def get_role_assignments( header_parameters = {} # type: Dict[str, Any] if continuation_token_parameter is not None: header_parameters['x-ms-continuation'] = self._serialize.header("continuation_token_parameter", continuation_token_parameter, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -289,6 +292,7 @@ def get_role_assignment_by_id( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignment_by_id.metadata['url'] # type: ignore @@ -304,7 +308,7 @@ def get_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -342,6 +346,7 @@ def delete_role_assignment_by_id( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.delete_role_assignment_by_id.metadata['url'] # type: ignore @@ -357,6 +362,7 @@ def delete_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -388,6 +394,7 @@ def get_caller_role_assignments( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_caller_role_assignments.metadata['url'] # type: ignore @@ -402,7 +409,7 @@ def get_caller_role_assignments( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py index 0d7f64d18ff0..03703237ffc5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py @@ -13,7 +13,7 @@ __all__ = ['ArtifactsClient'] try: - from ._patch import patch_sdk + from ._patch import patch_sdk # type: ignore patch_sdk() except ImportError: pass diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py index 8d63a7bb27dd..5d0aff821595 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py @@ -48,8 +48,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2019-06-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -62,6 +61,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = 
kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_metadata.json b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_metadata.json deleted file mode 100644 index 533beae51bd9..000000000000 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_metadata.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "chosen_version": "2019-06-01-preview", - "total_api_version_list": ["2019-06-01-preview"], - "client": { - "name": "ArtifactsClient", - "filename": "_artifacts_client", - "description": "ArtifactsClient." - }, - "global_parameters": { - "sync_method": { - "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "async_method": { - "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, endpoint" - }, - "config": { - "credential": true, - "credential_scopes": ["https://dev.azuresynapse.net/.default"] - }, - "operation_groups": { - "linked_service": "LinkedServiceOperations", - "dataset": "DatasetOperations", - "pipeline": "PipelineOperations", - "pipeline_run": "PipelineRunOperations", - "trigger": "TriggerOperations", - "trigger_run": "TriggerRunOperations", - "data_flow": "DataFlowOperations", - "data_flow_debug_session": "DataFlowDebugSessionOperations", - "sql_script": "SqlScriptOperations", - "spark_job_definition": "SparkJobDefinitionOperations", - "notebook": "NotebookOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" -} \ No newline at end of file diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py index 035146e99a22..eae7c95b6fbd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.2.0" +VERSION = "0.1.0" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py index 3d76e5630c58..16f882fa0a48 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._artifacts_client_async import ArtifactsClient +from ._artifacts_client import ArtifactsClient __all__ = ['ArtifactsClient'] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py similarity index 76% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py index e8f9d7f6c113..76dcfd962d41 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py @@ -15,18 +15,18 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration_async import ArtifactsClientConfiguration -from .operations_async import LinkedServiceOperations -from .operations_async import DatasetOperations -from .operations_async import PipelineOperations -from .operations_async import PipelineRunOperations -from .operations_async import TriggerOperations -from .operations_async import TriggerRunOperations -from .operations_async import DataFlowOperations -from .operations_async import DataFlowDebugSessionOperations -from .operations_async import SqlScriptOperations -from .operations_async import SparkJobDefinitionOperations -from .operations_async import NotebookOperations +from ._configuration import ArtifactsClientConfiguration +from .operations import LinkedServiceOperations +from .operations import DatasetOperations +from .operations import PipelineOperations +from .operations import PipelineRunOperations +from .operations import TriggerOperations +from .operations import TriggerRunOperations +from .operations import DataFlowOperations +from .operations import DataFlowDebugSessionOperations +from .operations import SqlScriptOperations +from .operations import SparkJobDefinitionOperations +from .operations import NotebookOperations from .. import models @@ -34,27 +34,27 @@ class ArtifactsClient(object): """ArtifactsClient. 
:ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: azure.synapse.artifacts.aio.operations_async.LinkedServiceOperations + :vartype linked_service: azure.synapse.artifacts.aio.operations.LinkedServiceOperations :ivar dataset: DatasetOperations operations - :vartype dataset: azure.synapse.artifacts.aio.operations_async.DatasetOperations + :vartype dataset: azure.synapse.artifacts.aio.operations.DatasetOperations :ivar pipeline: PipelineOperations operations - :vartype pipeline: azure.synapse.artifacts.aio.operations_async.PipelineOperations + :vartype pipeline: azure.synapse.artifacts.aio.operations.PipelineOperations :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: azure.synapse.artifacts.aio.operations_async.PipelineRunOperations + :vartype pipeline_run: azure.synapse.artifacts.aio.operations.PipelineRunOperations :ivar trigger: TriggerOperations operations - :vartype trigger: azure.synapse.artifacts.aio.operations_async.TriggerOperations + :vartype trigger: azure.synapse.artifacts.aio.operations.TriggerOperations :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: azure.synapse.artifacts.aio.operations_async.TriggerRunOperations + :vartype trigger_run: azure.synapse.artifacts.aio.operations.TriggerRunOperations :ivar data_flow: DataFlowOperations operations - :vartype data_flow: azure.synapse.artifacts.aio.operations_async.DataFlowOperations + :vartype data_flow: azure.synapse.artifacts.aio.operations.DataFlowOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: azure.synapse.artifacts.aio.operations_async.DataFlowDebugSessionOperations + :vartype data_flow_debug_session: azure.synapse.artifacts.aio.operations.DataFlowDebugSessionOperations :ivar sql_script: SqlScriptOperations operations - :vartype sql_script: azure.synapse.artifacts.aio.operations_async.SqlScriptOperations + :vartype sql_script: azure.synapse.artifacts.aio.operations.SqlScriptOperations :ivar spark_job_definition: SparkJobDefinitionOperations operations - :vartype spark_job_definition: azure.synapse.artifacts.aio.operations_async.SparkJobDefinitionOperations + :vartype spark_job_definition: azure.synapse.artifacts.aio.operations.SparkJobDefinitionOperations :ivar notebook: NotebookOperations operations - :vartype notebook: azure.synapse.artifacts.aio.operations_async.NotebookOperations + :vartype notebook: azure.synapse.artifacts.aio.operations.NotebookOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. 
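
The regeneration above moves the async operation groups from azure.synapse.artifacts.aio.operations_async to azure.synapse.artifacts.aio.operations, and a caller-supplied credential_scopes keyword now replaces the default scope instead of being appended to it. A minimal sketch of caller code against the regenerated package, assuming azure-identity for the credential; the workspace endpoint and explicit scope below are placeholders, not values taken from this patch:

    # Illustrative only; endpoint and scope are placeholders.
    from azure.identity.aio import DefaultAzureCredential
    from azure.synapse.artifacts.aio import ArtifactsClient
    from azure.synapse.artifacts.aio.operations import NotebookOperations  # new namespace (was operations_async)

    client = ArtifactsClient(
        credential=DefaultAzureCredential(),
        endpoint="https://myworkspace.dev.azuresynapse.net",
        # With this change, credential_scopes overrides the default scope rather than extending it.
        credential_scopes=["https://dev.azuresynapse.net/.default"],
    )
    assert isinstance(client.notebook, NotebookOperations)
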
diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py similarity index 93% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py index 760c01d1cbcb..9c9e4c5fa938 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py @@ -45,8 +45,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2019-06-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -58,6 +57,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py similarity index 52% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/__init__.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py index 3ff8957df2ea..b1056111cbe8 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py @@ -6,17 +6,17 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._linked_service_operations_async import LinkedServiceOperations -from ._dataset_operations_async import DatasetOperations -from ._pipeline_operations_async import PipelineOperations -from ._pipeline_run_operations_async import PipelineRunOperations -from ._trigger_operations_async import TriggerOperations -from ._trigger_run_operations_async import TriggerRunOperations -from ._data_flow_operations_async import DataFlowOperations -from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations -from ._sql_script_operations_async import SqlScriptOperations -from ._spark_job_definition_operations_async import SparkJobDefinitionOperations -from ._notebook_operations_async import NotebookOperations +from ._linked_service_operations import LinkedServiceOperations +from ._dataset_operations import DatasetOperations +from ._pipeline_operations import PipelineOperations +from ._pipeline_run_operations import PipelineRunOperations +from ._trigger_operations import TriggerOperations +from ._trigger_run_operations import TriggerRunOperations +from ._data_flow_operations import DataFlowOperations +from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations +from ._sql_script_operations import SqlScriptOperations +from ._spark_job_definition_operations import SparkJobDefinitionOperations +from ._notebook_operations import NotebookOperations __all__ = [ 'LinkedServiceOperations', diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_debug_session_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py similarity index 97% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_debug_session_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py index 7da11e64c8be..43601f7bb532 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_debug_session_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py @@ -52,6 +52,7 @@ async def _create_data_flow_debug_session_initial( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_data_flow_debug_session_initial.metadata['url'] # type: ignore @@ -67,13 +68,12 @@ async def _create_data_flow_debug_session_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -168,11 +168,12 @@ def query_data_flow_debug_sessions_by_workspace( error_map = {404: 
ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -240,6 +241,7 @@ async def add_data_flow( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -255,13 +257,12 @@ async def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -297,6 +298,7 @@ async def delete_data_flow_debug_session( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete_data_flow_debug_session.metadata['url'] # type: ignore @@ -312,12 +314,12 @@ async def delete_data_flow_debug_session( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -341,6 +343,7 @@ async def _execute_command_initial( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -356,13 +359,12 @@ async def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git 
a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py similarity index 65% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py index 3f7c007efd0c..0a58c942e073 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... import models @@ -40,37 +42,24 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config - async def create_or_update_data_flow( + async def _create_or_update_data_flow_initial( self, data_flow_name: str, properties: "models.DataFlow", if_match: Optional[str] = None, **kwargs - ) -> "models.DataFlowResource": - """Creates or updates a data flow. - - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + ) -> Optional["models.DataFlowResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _data_flow = models.DataFlowResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_data_flow.metadata['url'] # type: ignore + url = self._create_or_update_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -86,28 +75,95 @@ async def create_or_update_data_flow( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _create_or_update_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + async def begin_create_or_update_data_flow( + self, + data_flow_name: str, + properties: "models.DataFlow", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.DataFlowResource"]: + """Creates or updates a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param properties: Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DataFlowResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.DataFlowResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_data_flow_initial( + data_flow_name=data_flow_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore async def get_data_flow( self, @@ -131,6 +187,7 @@ async def get_data_flow( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_data_flow.metadata['url'] # type: ignore @@ -148,7 +205,7 @@ async def get_data_flow( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -167,27 +224,19 @@ async def get_data_flow( return deserialized get_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore - async def delete_data_flow( + async def _delete_data_flow_initial( self, data_flow_name: str, **kwargs ) -> None: - """Deletes a data flow. - - :param data_flow_name: The data flow name. 
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_data_flow.metadata['url'] # type: ignore + url = self._delete_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -200,12 +249,13 @@ async def delete_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -213,7 +263,61 @@ async def delete_data_flow( if cls: return cls(pipeline_response, None, {}) - delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _delete_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + async def begin_delete_data_flow( + self, + data_flow_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_data_flow_initial( + data_flow_name=data_flow_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore def get_data_flows_by_workspace( self, @@ -230,11 +334,12 @@ def get_data_flows_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_dataset_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py similarity index 66% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_dataset_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py index 1fb9dab66407..589d1d7eb382 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_dataset_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... 
import models @@ -55,11 +57,12 @@ def get_datasets_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -108,37 +111,24 @@ async def get_next(next_link=None): ) get_datasets_by_workspace.metadata = {'url': '/datasets'} # type: ignore - async def create_or_update_dataset( + async def _create_or_update_dataset_initial( self, dataset_name: str, properties: "models.Dataset", if_match: Optional[str] = None, **kwargs - ) -> "models.DatasetResource": - """Creates or updates a dataset. - - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + ) -> Optional["models.DatasetResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _dataset = models.DatasetResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_dataset.metadata['url'] # type: ignore + url = self._create_or_update_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -154,28 +144,95 @@ async def create_or_update_dataset( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = 
self._deserialize('DatasetResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _create_or_update_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + async def begin_create_or_update_dataset( + self, + dataset_name: str, + properties: "models.Dataset", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.DatasetResource"]: + """Creates or updates a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :param properties: Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DatasetResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.DatasetResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_dataset_initial( + dataset_name=dataset_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore async def get_dataset( self, @@ -199,6 +256,7 @@ async def get_dataset( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_dataset.metadata['url'] # type: ignore @@ -216,7 +274,7 @@ async def get_dataset( header_parameters = {} 
# type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -237,27 +295,19 @@ async def get_dataset( return deserialized get_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore - async def delete_dataset( + async def _delete_dataset_initial( self, dataset_name: str, **kwargs ) -> None: - """Deletes a dataset. - - :param dataset_name: The dataset name. - :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_dataset.metadata['url'] # type: ignore + url = self._delete_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -270,12 +320,13 @@ async def delete_dataset( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -283,4 +334,58 @@ async def delete_dataset( if cls: return cls(pipeline_response, None, {}) - delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _delete_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + async def begin_delete_dataset( + self, + dataset_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_dataset_initial( + dataset_name=dataset_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_linked_service_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py similarity index 66% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_linked_service_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py index e4dbb57823fd..53350a408c0a 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_linked_service_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... 
import models @@ -55,11 +57,12 @@ def get_linked_services_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -108,37 +111,24 @@ async def get_next(next_link=None): ) get_linked_services_by_workspace.metadata = {'url': '/linkedservices'} # type: ignore - async def create_or_update_linked_service( + async def _create_or_update_linked_service_initial( self, linked_service_name: str, properties: "models.LinkedService", if_match: Optional[str] = None, **kwargs - ) -> "models.LinkedServiceResource": - """Creates or updates a linked service. - - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService - :param if_match: ETag of the linkedService entity. Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + ) -> Optional["models.LinkedServiceResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _linked_service = models.LinkedServiceResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_linked_service.metadata['url'] # type: ignore + url = self._create_or_update_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -154,28 +144,95 @@ async def create_or_update_linked_service( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _create_or_update_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + async def begin_create_or_update_linked_service( + self, + linked_service_name: str, + properties: "models.LinkedService", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.LinkedServiceResource"]: + """Creates or updates a linked service. + + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param properties: Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either LinkedServiceResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.LinkedServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_linked_service_initial( + linked_service_name=linked_service_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore async def get_linked_service( self, @@ -200,6 +257,7 @@ async def get_linked_service( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_linked_service.metadata['url'] # type: ignore @@ -217,7 +275,7 @@ async def get_linked_service( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -238,27 +296,19 @@ async def get_linked_service( return deserialized get_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore - async def delete_linked_service( + async def _delete_linked_service_initial( self, linked_service_name: str, **kwargs ) -> None: - """Deletes a linked service. - - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_linked_service.metadata['url'] # type: ignore + url = self._delete_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -271,12 +321,13 @@ async def delete_linked_service( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -284,4 +335,58 @@ async def delete_linked_service( if cls: return cls(pipeline_response, None, {}) - delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _delete_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + async def begin_delete_linked_service( + self, + linked_service_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a linked service. + + :param linked_service_name: The linked service name. + :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_linked_service_initial( + linked_service_name=linked_service_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_notebook_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py similarity index 70% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_notebook_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py index 68dd787b9868..831087bc47af 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_notebook_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... 
import models @@ -55,11 +57,12 @@ def get_notebooks_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -123,11 +126,12 @@ def get_notebook_summary_by_work_space( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -176,37 +180,24 @@ async def get_next(next_link=None): ) get_notebook_summary_by_work_space.metadata = {'url': '/notebooks/summary'} # type: ignore - async def create_or_update_notebook( + async def _create_or_update_notebook_initial( self, notebook_name: str, properties: "models.Notebook", if_match: Optional[str] = None, **kwargs - ) -> "models.NotebookResource": - """Creates or updates a Note Book. - - :param notebook_name: The notebook name. - :type notebook_name: str - :param properties: Properties of Notebook. - :type properties: ~azure.synapse.artifacts.models.Notebook - :param if_match: ETag of the Note book entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.NotebookResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] + ) -> Optional["models.NotebookResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _notebook = models.NotebookResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_notebook.metadata['url'] # type: ignore + url = self._create_or_update_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -222,28 +213,95 @@ async def create_or_update_notebook( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_notebook, 'NotebookResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, 
header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('NotebookResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _create_or_update_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + async def begin_create_or_update_notebook( + self, + notebook_name: str, + properties: "models.Notebook", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.NotebookResource"]: + """Creates or updates a Note Book. + + :param notebook_name: The notebook name. + :type notebook_name: str + :param properties: Properties of Notebook. + :type properties: ~azure.synapse.artifacts.models.Notebook + :param if_match: ETag of the Note book entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either NotebookResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.NotebookResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_notebook_initial( + notebook_name=notebook_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore async def get_notebook( self, @@ -267,6 +325,7 @@ async def get_notebook( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_notebook.metadata['url'] # type: ignore @@ -284,7 +343,7 @@ async def get_notebook( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -305,27 +364,19 @@ async def get_notebook( return deserialized get_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore - async def delete_notebook( + async def _delete_notebook_initial( self, notebook_name: str, **kwargs ) -> None: - """Deletes a Note book. - - :param notebook_name: The notebook name. 
- :type notebook_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_notebook.metadata['url'] # type: ignore + url = self._delete_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -338,12 +389,13 @@ async def delete_notebook( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -351,4 +403,58 @@ async def delete_notebook( if cls: return cls(pipeline_response, None, {}) - delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _delete_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + async def begin_delete_notebook( + self, + notebook_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a Note book. + + :param notebook_name: The notebook name. + :type notebook_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_notebook_initial( + notebook_name=notebook_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py similarity index 71% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py index 956a5c55db3b..f612490f6172 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... 
import models @@ -55,11 +57,12 @@ def get_pipelines_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -108,35 +111,22 @@ async def get_next(next_link=None): ) get_pipelines_by_workspace.metadata = {'url': '/pipelines'} # type: ignore - async def create_or_update_pipeline( + async def _create_or_update_pipeline_initial( self, pipeline_name: str, pipeline: "models.PipelineResource", if_match: Optional[str] = None, **kwargs - ) -> "models.PipelineResource": - """Creates or updates a pipeline. - - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~azure.synapse.artifacts.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + ) -> Optional["models.PipelineResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_pipeline.metadata['url'] # type: ignore + url = self._create_or_update_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -152,28 +142,95 @@ async def create_or_update_pipeline( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = 
self._deserialize('PipelineResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _create_or_update_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + async def begin_create_or_update_pipeline( + self, + pipeline_name: str, + pipeline: "models.PipelineResource", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.PipelineResource"]: + """Creates or updates a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. + :type pipeline: ~azure.synapse.artifacts.models.PipelineResource + :param if_match: ETag of the pipeline entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PipelineResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.PipelineResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_pipeline_initial( + pipeline_name=pipeline_name, + pipeline=pipeline, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore async def get_pipeline( self, @@ -197,6 +254,7 @@ async def get_pipeline( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline.metadata['url'] # type: ignore @@ -214,7 +272,7 @@ 
async def get_pipeline( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -235,27 +293,19 @@ async def get_pipeline( return deserialized get_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore - async def delete_pipeline( + async def _delete_pipeline_initial( self, pipeline_name: str, **kwargs ) -> None: - """Deletes a pipeline. - - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_pipeline.metadata['url'] # type: ignore + url = self._delete_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -268,12 +318,13 @@ async def delete_pipeline( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -281,7 +332,61 @@ async def delete_pipeline( if cls: return cls(pipeline_response, None, {}) - delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _delete_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + async def begin_delete_pipeline( + self, + pipeline_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_pipeline_initial( + pipeline_name=pipeline_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore async def create_pipeline_run( self, @@ -318,6 +423,7 @@ async def create_pipeline_run( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_pipeline_run.metadata['url'] # type: ignore @@ -340,7 +446,7 @@ async def create_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -349,11 +455,10 @@ async def create_pipeline_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_run_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py similarity index 96% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_run_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py index 966fc280b177..81e09bc3cd17 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_run_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py @@ -58,6 +58,7 @@ async 
def query_pipeline_runs_by_workspace( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_pipeline_runs_by_workspace.metadata['url'] # type: ignore @@ -73,13 +74,12 @@ async def query_pipeline_runs_by_workspace( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -114,6 +114,7 @@ async def get_pipeline_run( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline_run.metadata['url'] # type: ignore @@ -129,7 +130,7 @@ async def get_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -173,6 +174,7 @@ async def query_activity_runs( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_activity_runs.metadata['url'] # type: ignore @@ -190,13 +192,12 @@ async def query_activity_runs( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -235,6 +236,7 @@ async def cancel_pipeline_run( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.cancel_pipeline_run.metadata['url'] # type: ignore @@ -252,6 +254,7 @@ async def cancel_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git 
a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_spark_job_definition_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py similarity index 97% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_spark_job_definition_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py index d0e34edc20ed..3308316fb796 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_spark_job_definition_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py @@ -57,11 +57,12 @@ def get_spark_job_definitions_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -138,6 +139,7 @@ async def create_or_update_spark_job_definition( _spark_job_definition = models.SparkJobDefinitionResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_spark_job_definition.metadata['url'] # type: ignore @@ -156,13 +158,12 @@ async def create_or_update_spark_job_definition( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -202,6 +203,7 @@ async def get_spark_job_definition( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_spark_job_definition.metadata['url'] # type: ignore @@ -219,7 +221,7 @@ async def get_spark_job_definition( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -258,6 +260,7 @@ async def delete_spark_job_definition( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = 
"application/json" # Construct URL url = self.delete_spark_job_definition.metadata['url'] # type: ignore @@ -273,6 +276,7 @@ async def delete_spark_job_definition( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -297,6 +301,7 @@ async def _execute_spark_job_definition_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._execute_spark_job_definition_initial.metadata['url'] # type: ignore @@ -312,7 +317,7 @@ async def _execute_spark_job_definition_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -404,6 +409,7 @@ async def _debug_spark_job_definition_initial( _spark_job_definition_azure_resource = models.SparkJobDefinitionResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._debug_spark_job_definition_initial.metadata['url'] # type: ignore @@ -419,13 +425,12 @@ async def _debug_spark_job_definition_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition_azure_resource, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_sql_script_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py similarity index 96% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_sql_script_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py index 8cd3366930fa..5d77468f8919 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_sql_script_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py @@ -55,11 +55,12 @@ def get_sql_scripts_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -136,6 +137,7 @@ async def create_or_update_sql_script( _sql_script = models.SqlScriptResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_sql_script.metadata['url'] # type: ignore @@ -154,13 +156,12 @@ async def create_or_update_sql_script( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_sql_script, 'SqlScriptResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -199,6 +200,7 @@ async def get_sql_script( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_sql_script.metadata['url'] # type: ignore @@ -216,7 +218,7 @@ async def get_sql_script( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -255,6 +257,7 @@ async def delete_sql_script( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.delete_sql_script.metadata['url'] # type: ignore @@ -270,6 +273,7 @@ async def delete_sql_script( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py similarity index 83% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py index 95d10ac44346..d4ce019823ab 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py @@ -57,11 +57,12 @@ def get_triggers_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} 
error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -110,37 +111,24 @@ async def get_next(next_link=None): ) get_triggers_by_workspace.metadata = {'url': '/triggers'} # type: ignore - async def create_or_update_trigger( + async def _create_or_update_trigger_initial( self, trigger_name: str, properties: "models.Trigger", if_match: Optional[str] = None, **kwargs - ) -> "models.TriggerResource": - """Creates or updates a trigger. - - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~azure.synapse.artifacts.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + ) -> Optional["models.TriggerResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _trigger = models.TriggerResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_trigger.metadata['url'] # type: ignore + url = self._create_or_update_trigger_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -156,28 +144,95 @@ async def create_or_update_trigger( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = 
self._deserialize('TriggerResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + _create_or_update_trigger_initial.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + + async def begin_create_or_update_trigger( + self, + trigger_name: str, + properties: "models.Trigger", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.TriggerResource"]: + """Creates or updates a trigger. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param properties: Properties of the trigger. + :type properties: ~azure.synapse.artifacts.models.Trigger + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either TriggerResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.TriggerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_trigger_initial( + trigger_name=trigger_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore async def get_trigger( self, @@ -201,6 +256,7 @@ async def get_trigger( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_trigger.metadata['url'] # type: ignore @@ -218,7 +274,7 @@ async def get_trigger( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = 
self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -239,27 +295,19 @@ async def get_trigger( return deserialized get_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore - async def delete_trigger( + async def _delete_trigger_initial( self, trigger_name: str, **kwargs ) -> None: - """Deletes a trigger. - - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_trigger.metadata['url'] # type: ignore + url = self._delete_trigger_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -272,12 +320,13 @@ async def delete_trigger( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -285,7 +334,61 @@ async def delete_trigger( if cls: return cls(pipeline_response, None, {}) - delete_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + _delete_trigger_initial.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + + async def begin_delete_trigger( + self, + trigger_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a trigger. + + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_trigger_initial( + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore async def _subscribe_trigger_to_events_initial( self, @@ -296,6 +399,7 @@ async def _subscribe_trigger_to_events_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._subscribe_trigger_to_events_initial.metadata['url'] # type: ignore @@ -311,7 +415,7 @@ async def _subscribe_trigger_to_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -407,6 +511,7 @@ async def get_event_subscription_status( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -422,7 +527,7 @@ async def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -450,6 +555,7 @@ async def _unsubscribe_trigger_from_events_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._unsubscribe_trigger_from_events_initial.metadata['url'] # type: ignore @@ -465,7 +571,7 @@ async def _unsubscribe_trigger_from_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 
'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -552,6 +658,7 @@ async def _start_trigger_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._start_trigger_initial.metadata['url'] # type: ignore @@ -567,6 +674,7 @@ async def _start_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -645,6 +753,7 @@ async def _stop_trigger_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._stop_trigger_initial.metadata['url'] # type: ignore @@ -660,6 +769,7 @@ async def _stop_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_run_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py similarity index 71% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_run_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py index 8f8a60659f01..c0b76647f6b7 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_run_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py @@ -60,6 +60,7 @@ async def rerun_trigger_instance( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.rerun_trigger_instance.metadata['url'] # type: ignore @@ -76,6 +77,7 @@ async def rerun_trigger_instance( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -91,6 +93,60 @@ async def rerun_trigger_instance( rerun_trigger_instance.metadata = {'url': '/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + async def cancel_trigger_instance( + self, + trigger_name: str, + run_id: str, + **kwargs + ) -> None: + """Cancel single trigger instance by runId. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.cancel_trigger_instance.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.CloudError, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + cancel_trigger_instance.metadata = {'url': '/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + async def query_trigger_runs_by_workspace( self, filter_parameters: "models.RunFilterParameters", @@ -110,6 +166,7 @@ async def query_trigger_runs_by_workspace( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_trigger_runs_by_workspace.metadata['url'] # type: ignore @@ -125,13 +182,12 @@ async def query_trigger_runs_by_workspace( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py index 43d23d006cb7..3a3a4dd220cd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py @@ -305,6 +305,8 @@ from ._models_py3 import SparkServiceError from ._models_py3 import 
SparkServicePlugin from ._models_py3 import SqlConnection + from ._models_py3 import SqlPoolReference + from ._models_py3 import SqlPoolStoredProcedureActivity from ._models_py3 import SqlScript from ._models_py3 import SqlScriptContent from ._models_py3 import SqlScriptMetadata @@ -326,6 +328,10 @@ from ._models_py3 import SwitchCase from ._models_py3 import SybaseLinkedService from ._models_py3 import SybaseTableDataset + from ._models_py3 import SynapseNotebookActivity + from ._models_py3 import SynapseNotebookReference + from ._models_py3 import SynapseSparkJobDefinitionActivity + from ._models_py3 import SynapseSparkJobReference from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataTableDataset from ._models_py3 import Transformation @@ -659,6 +665,8 @@ from ._models import SparkServiceError # type: ignore from ._models import SparkServicePlugin # type: ignore from ._models import SqlConnection # type: ignore + from ._models import SqlPoolReference # type: ignore + from ._models import SqlPoolStoredProcedureActivity # type: ignore from ._models import SqlScript # type: ignore from ._models import SqlScriptContent # type: ignore from ._models import SqlScriptMetadata # type: ignore @@ -680,6 +688,10 @@ from ._models import SwitchCase # type: ignore from ._models import SybaseLinkedService # type: ignore from ._models import SybaseTableDataset # type: ignore + from ._models import SynapseNotebookActivity # type: ignore + from ._models import SynapseNotebookReference # type: ignore + from ._models import SynapseSparkJobDefinitionActivity # type: ignore + from ._models import SynapseSparkJobReference # type: ignore from ._models import TeradataLinkedService # type: ignore from ._models import TeradataTableDataset # type: ignore from ._models import Transformation # type: ignore @@ -718,15 +730,20 @@ from ._artifacts_client_enums import ( AvroCompressionCodec, AzureFunctionActivityMethod, + BigDataPoolReferenceType, CellOutputType, DataFlowComputeType, + DataFlowReferenceType, DatasetCompressionLevel, + DatasetReferenceType, + Db2AuthenticationType, DelimitedTextCompressionCodec, DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, DynamicsServicePrincipalCredentialType, EventSubscriptionStatus, + ExpressionType, FtpAuthenticationType, GoogleAdWordsAuthenticationType, GoogleBigQueryAuthenticationType, @@ -738,13 +755,16 @@ HiveThriftTransportProtocol, HttpAuthenticationType, ImpalaAuthenticationType, + IntegrationRuntimeReferenceType, MongoDbAuthenticationType, + NotebookReferenceType, ODataAadServicePrincipalCredentialType, ODataAuthenticationType, OrcCompressionCodec, ParameterType, ParquetCompressionCodec, PhoenixAuthenticationType, + PipelineReferenceType, PluginCurrentState, PrestoAuthenticationType, RestServiceAuthenticationType, @@ -759,19 +779,25 @@ SparkAuthenticationType, SparkBatchJobResultType, SparkErrorSource, + SparkJobReferenceType, SparkJobType, SparkServerType, SparkThriftTransportProtocol, SqlConnectionType, + SqlPoolReferenceType, + SqlScriptType, + SsisLogLocationType, SsisPackageLocationType, StoredProcedureParameterType, SybaseAuthenticationType, TeradataAuthenticationType, TriggerRunStatus, TriggerRuntimeState, + Type, VariableType, WebActivityMethod, WebAuthenticationType, + WebHookActivityMethod, ) __all__ = [ @@ -1073,6 +1099,8 @@ 'SparkServiceError', 'SparkServicePlugin', 'SqlConnection', + 'SqlPoolReference', + 'SqlPoolStoredProcedureActivity', 'SqlScript', 'SqlScriptContent', 'SqlScriptMetadata', @@ -1094,6 
+1122,10 @@ 'SwitchCase', 'SybaseLinkedService', 'SybaseTableDataset', + 'SynapseNotebookActivity', + 'SynapseNotebookReference', + 'SynapseSparkJobDefinitionActivity', + 'SynapseSparkJobReference', 'TeradataLinkedService', 'TeradataTableDataset', 'Transformation', @@ -1130,15 +1162,20 @@ 'ZohoObjectDataset', 'AvroCompressionCodec', 'AzureFunctionActivityMethod', + 'BigDataPoolReferenceType', 'CellOutputType', 'DataFlowComputeType', + 'DataFlowReferenceType', 'DatasetCompressionLevel', + 'DatasetReferenceType', + 'Db2AuthenticationType', 'DelimitedTextCompressionCodec', 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', 'DynamicsServicePrincipalCredentialType', 'EventSubscriptionStatus', + 'ExpressionType', 'FtpAuthenticationType', 'GoogleAdWordsAuthenticationType', 'GoogleBigQueryAuthenticationType', @@ -1150,13 +1187,16 @@ 'HiveThriftTransportProtocol', 'HttpAuthenticationType', 'ImpalaAuthenticationType', + 'IntegrationRuntimeReferenceType', 'MongoDbAuthenticationType', + 'NotebookReferenceType', 'ODataAadServicePrincipalCredentialType', 'ODataAuthenticationType', 'OrcCompressionCodec', 'ParameterType', 'ParquetCompressionCodec', 'PhoenixAuthenticationType', + 'PipelineReferenceType', 'PluginCurrentState', 'PrestoAuthenticationType', 'RestServiceAuthenticationType', @@ -1171,17 +1211,23 @@ 'SparkAuthenticationType', 'SparkBatchJobResultType', 'SparkErrorSource', + 'SparkJobReferenceType', 'SparkJobType', 'SparkServerType', 'SparkThriftTransportProtocol', 'SqlConnectionType', + 'SqlPoolReferenceType', + 'SqlScriptType', + 'SsisLogLocationType', 'SsisPackageLocationType', 'StoredProcedureParameterType', 'SybaseAuthenticationType', 'TeradataAuthenticationType', 'TriggerRunStatus', 'TriggerRuntimeState', + 'Type', 'VariableType', 'WebActivityMethod', 'WebAuthenticationType', + 'WebHookActivityMethod', ] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py index 133cdabe106e..56dc9d2d4f7b 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py @@ -6,485 +6,587 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum - -class AvroCompressionCodec(str, Enum): +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class AvroCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "none" + DEFLATE = "deflate" + SNAPPY = "snappy" + XZ = "xz" + BZIP2 = "bzip2" + +class AzureFunctionActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The list of HTTP methods supported by a AzureFunctionActivity. 
+ """ - none = "none" - deflate = "deflate" - snappy = "snappy" - xz = "xz" - bzip2 = "bzip2" + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + OPTIONS = "OPTIONS" + HEAD = "HEAD" + TRACE = "TRACE" -class AzureFunctionActivityMethod(str, Enum): - """The list of HTTP methods supported by a AzureFunctionActivity. +class BigDataPoolReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Big data pool reference type. """ - get = "GET" - post = "POST" - put = "PUT" - delete = "DELETE" - options = "OPTIONS" - head = "HEAD" - trace = "TRACE" + BIG_DATA_POOL_REFERENCE = "BigDataPoolReference" -class CellOutputType(str, Enum): +class CellOutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Execution, display, or stream outputs. """ - execute_result = "execute_result" - display_data = "display_data" - stream = "stream" - error = "error" + EXECUTE_RESULT = "execute_result" + DISPLAY_DATA = "display_data" + STREAM = "stream" + ERROR = "error" -class DataFlowComputeType(str, Enum): +class DataFlowComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Compute type of the cluster which will execute data flow job. """ - general = "General" - memory_optimized = "MemoryOptimized" - compute_optimized = "ComputeOptimized" + GENERAL = "General" + MEMORY_OPTIMIZED = "MemoryOptimized" + COMPUTE_OPTIMIZED = "ComputeOptimized" + +class DataFlowReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Data flow reference type. + """ -class DatasetCompressionLevel(str, Enum): + DATA_FLOW_REFERENCE = "DataFlowReference" + +class DatasetCompressionLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available compression levels. """ - optimal = "Optimal" - fastest = "Fastest" + OPTIMAL = "Optimal" + FASTEST = "Fastest" + +class DatasetReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Dataset reference type. + """ + + DATASET_REFERENCE = "DatasetReference" + +class Db2AuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """AuthenticationType to be used for connection. + """ + + BASIC = "Basic" -class DelimitedTextCompressionCodec(str, Enum): +class DelimitedTextCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - bzip2 = "bzip2" - gzip = "gzip" - deflate = "deflate" - zip_deflate = "zipDeflate" - snappy = "snappy" - lz4 = "lz4" + BZIP2 = "bzip2" + GZIP = "gzip" + DEFLATE = "deflate" + ZIP_DEFLATE = "zipDeflate" + SNAPPY = "snappy" + LZ4 = "lz4" -class DependencyCondition(str, Enum): +class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - succeeded = "Succeeded" - failed = "Failed" - skipped = "Skipped" - completed = "Completed" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + SKIPPED = "Skipped" + COMPLETED = "Completed" -class DynamicsAuthenticationType(str, Enum): +class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). 
""" - office365 = "Office365" - ifd = "Ifd" - aad_service_principal = "AADServicePrincipal" + OFFICE365 = "Office365" + IFD = "Ifd" + AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" -class DynamicsDeploymentType(str, Enum): +class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). """ - online = "Online" - on_premises_with_ifd = "OnPremisesWithIfd" + ONLINE = "Online" + ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsServicePrincipalCredentialType(str, Enum): +class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). """ - service_principal_key = "ServicePrincipalKey" - service_principal_cert = "ServicePrincipalCert" + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class EventSubscriptionStatus(str, Enum): +class EventSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Event Subscription Status. """ - enabled = "Enabled" - provisioning = "Provisioning" - deprovisioning = "Deprovisioning" - disabled = "Disabled" - unknown = "Unknown" + ENABLED = "Enabled" + PROVISIONING = "Provisioning" + DEPROVISIONING = "Deprovisioning" + DISABLED = "Disabled" + UNKNOWN = "Unknown" + +class ExpressionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Expression type. + """ + + EXPRESSION = "Expression" -class FtpAuthenticationType(str, Enum): +class FtpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ - basic = "Basic" - anonymous = "Anonymous" + BASIC = "Basic" + ANONYMOUS = "Anonymous" -class GoogleAdWordsAuthenticationType(str, Enum): +class GoogleAdWordsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. """ - service_authentication = "ServiceAuthentication" - user_authentication = "UserAuthentication" + SERVICE_AUTHENTICATION = "ServiceAuthentication" + USER_AUTHENTICATION = "UserAuthentication" -class GoogleBigQueryAuthenticationType(str, Enum): +class GoogleBigQueryAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. """ - service_authentication = "ServiceAuthentication" - user_authentication = "UserAuthentication" + SERVICE_AUTHENTICATION = "ServiceAuthentication" + USER_AUTHENTICATION = "UserAuthentication" -class HBaseAuthenticationType(str, Enum): +class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism to use to connect to the HBase server. """ - anonymous = "Anonymous" - basic = "Basic" + ANONYMOUS = "Anonymous" + BASIC = "Basic" -class HdiNodeTypes(str, Enum): +class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The node types on which the script action should be executed. 
""" - headnode = "Headnode" - workernode = "Workernode" - zookeeper = "Zookeeper" + HEADNODE = "Headnode" + WORKERNODE = "Workernode" + ZOOKEEPER = "Zookeeper" -class HDInsightActivityDebugInfoOption(str, Enum): +class HDInsightActivityDebugInfoOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The HDInsightActivityDebugInfoOption settings to use. """ - none = "None" - always = "Always" - failure = "Failure" + NONE = "None" + ALWAYS = "Always" + FAILURE = "Failure" -class HiveAuthenticationType(str, Enum): +class HiveAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Hive server. """ - anonymous = "Anonymous" - username = "Username" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME = "Username" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class HiveServerType(str, Enum): +class HiveServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of Hive server. """ - hive_server1 = "HiveServer1" - hive_server2 = "HiveServer2" - hive_thrift_server = "HiveThriftServer" + HIVE_SERVER1 = "HiveServer1" + HIVE_SERVER2 = "HiveServer2" + HIVE_THRIFT_SERVER = "HiveThriftServer" -class HiveThriftTransportProtocol(str, Enum): +class HiveThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. """ - binary = "Binary" - sasl = "SASL" - http = "HTTP " + BINARY = "Binary" + SASL = "SASL" + HTTP = "HTTP " -class HttpAuthenticationType(str, Enum): +class HttpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the HTTP server. """ - basic = "Basic" - anonymous = "Anonymous" - digest = "Digest" - windows = "Windows" - client_certificate = "ClientCertificate" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + DIGEST = "Digest" + WINDOWS = "Windows" + CLIENT_CERTIFICATE = "ClientCertificate" -class ImpalaAuthenticationType(str, Enum): +class ImpalaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. """ - anonymous = "Anonymous" - sasl_username = "SASLUsername" - username_and_password = "UsernameAndPassword" + ANONYMOUS = "Anonymous" + SASL_USERNAME = "SASLUsername" + USERNAME_AND_PASSWORD = "UsernameAndPassword" -class MongoDbAuthenticationType(str, Enum): +class IntegrationRuntimeReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of integration runtime. + """ + + INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" + +class MongoDbAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the MongoDB database. """ - basic = "Basic" - anonymous = "Anonymous" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + +class NotebookReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Synapse notebook reference type. + """ + + NOTEBOOK_REFERENCE = "NotebookReference" -class ODataAadServicePrincipalCredentialType(str, Enum): +class ODataAadServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specify the credential type (key or cert) is used for service principal. 
""" - service_principal_key = "ServicePrincipalKey" - service_principal_cert = "ServicePrincipalCert" + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class ODataAuthenticationType(str, Enum): +class ODataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the OData service. """ - basic = "Basic" - anonymous = "Anonymous" - windows = "Windows" - aad_service_principal = "AadServicePrincipal" - managed_service_identity = "ManagedServiceIdentity" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + WINDOWS = "Windows" + AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" + MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class OrcCompressionCodec(str, Enum): +class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - none = "none" - zlib = "zlib" - snappy = "snappy" + NONE = "none" + ZLIB = "zlib" + SNAPPY = "snappy" -class ParameterType(str, Enum): +class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. """ - object = "Object" - string = "String" - int = "Int" - float = "Float" - bool = "Bool" - array = "Array" - secure_string = "SecureString" + OBJECT = "Object" + STRING = "String" + INT = "Int" + FLOAT = "Float" + BOOL = "Bool" + ARRAY = "Array" + SECURE_STRING = "SecureString" -class ParquetCompressionCodec(str, Enum): +class ParquetCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - none = "none" - gzip = "gzip" - snappy = "snappy" - lzo = "lzo" + NONE = "none" + GZIP = "gzip" + SNAPPY = "snappy" + LZO = "lzo" -class PhoenixAuthenticationType(str, Enum): +class PhoenixAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Phoenix server. """ - anonymous = "Anonymous" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" + +class PipelineReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Pipeline reference type. + """ + + PIPELINE_REFERENCE = "PipelineReference" -class PluginCurrentState(str, Enum): +class PluginCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - preparation = "Preparation" - resource_acquisition = "ResourceAcquisition" - queued = "Queued" - submission = "Submission" - monitoring = "Monitoring" - cleanup = "Cleanup" - ended = "Ended" + PREPARATION = "Preparation" + RESOURCE_ACQUISITION = "ResourceAcquisition" + QUEUED = "Queued" + SUBMISSION = "Submission" + MONITORING = "Monitoring" + CLEANUP = "Cleanup" + ENDED = "Ended" -class PrestoAuthenticationType(str, Enum): +class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Presto server. """ - anonymous = "Anonymous" - ldap = "LDAP" + ANONYMOUS = "Anonymous" + LDAP = "LDAP" -class RestServiceAuthenticationType(str, Enum): +class RestServiceAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the REST service. 
""" - anonymous = "Anonymous" - basic = "Basic" - aad_service_principal = "AadServicePrincipal" - managed_service_identity = "ManagedServiceIdentity" + ANONYMOUS = "Anonymous" + BASIC = "Basic" + AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" + MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class RunQueryFilterOperand(str, Enum): +class RunQueryFilterOperand(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. """ - pipeline_name = "PipelineName" - status = "Status" - run_start = "RunStart" - run_end = "RunEnd" - activity_name = "ActivityName" - activity_run_start = "ActivityRunStart" - activity_run_end = "ActivityRunEnd" - activity_type = "ActivityType" - trigger_name = "TriggerName" - trigger_run_timestamp = "TriggerRunTimestamp" - run_group_id = "RunGroupId" - latest_only = "LatestOnly" - -class RunQueryFilterOperator(str, Enum): + PIPELINE_NAME = "PipelineName" + STATUS = "Status" + RUN_START = "RunStart" + RUN_END = "RunEnd" + ACTIVITY_NAME = "ActivityName" + ACTIVITY_RUN_START = "ActivityRunStart" + ACTIVITY_RUN_END = "ActivityRunEnd" + ACTIVITY_TYPE = "ActivityType" + TRIGGER_NAME = "TriggerName" + TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" + RUN_GROUP_ID = "RunGroupId" + LATEST_ONLY = "LatestOnly" + +class RunQueryFilterOperator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Operator to be used for filter. """ - equals = "Equals" - not_equals = "NotEquals" - in_enum = "In" - not_in = "NotIn" + EQUALS = "Equals" + NOT_EQUALS = "NotEquals" + IN_ENUM = "In" + NOT_IN = "NotIn" -class RunQueryOrder(str, Enum): +class RunQueryOrder(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Sorting order of the parameter. """ - asc = "ASC" - desc = "DESC" + ASC = "ASC" + DESC = "DESC" -class RunQueryOrderByField(str, Enum): +class RunQueryOrderByField(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. """ - run_start = "RunStart" - run_end = "RunEnd" - pipeline_name = "PipelineName" - status = "Status" - activity_name = "ActivityName" - activity_run_start = "ActivityRunStart" - activity_run_end = "ActivityRunEnd" - trigger_name = "TriggerName" - trigger_run_timestamp = "TriggerRunTimestamp" + RUN_START = "RunStart" + RUN_END = "RunEnd" + PIPELINE_NAME = "PipelineName" + STATUS = "Status" + ACTIVITY_NAME = "ActivityName" + ACTIVITY_RUN_START = "ActivityRunStart" + ACTIVITY_RUN_END = "ActivityRunEnd" + TRIGGER_NAME = "TriggerName" + TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" -class SapHanaAuthenticationType(str, Enum): +class SapHanaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the SAP HANA server. 
""" - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class SchedulerCurrentState(str, Enum): +class SchedulerCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - queued = "Queued" - scheduled = "Scheduled" - ended = "Ended" + QUEUED = "Queued" + SCHEDULED = "Scheduled" + ENDED = "Ended" -class ServiceNowAuthenticationType(str, Enum): +class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. """ - basic = "Basic" - o_auth2 = "OAuth2" + BASIC = "Basic" + O_AUTH2 = "OAuth2" -class SftpAuthenticationType(str, Enum): +class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ - basic = "Basic" - ssh_public_key = "SshPublicKey" + BASIC = "Basic" + SSH_PUBLIC_KEY = "SshPublicKey" -class SparkAuthenticationType(str, Enum): +class SparkAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Spark server. """ - anonymous = "Anonymous" - username = "Username" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME = "Username" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class SparkBatchJobResultType(str, Enum): +class SparkBatchJobResultType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The Spark batch job result. """ - uncertain = "Uncertain" - succeeded = "Succeeded" - failed = "Failed" - cancelled = "Cancelled" + UNCERTAIN = "Uncertain" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELLED = "Cancelled" + +class SparkErrorSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): -class SparkErrorSource(str, Enum): + SYSTEM = "System" + USER = "User" + UNKNOWN = "Unknown" + DEPENDENCY = "Dependency" - system = "System" - user = "User" - unknown = "Unknown" - dependency = "Dependency" +class SparkJobReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Synapse spark job reference type. + """ -class SparkJobType(str, Enum): + SPARK_JOB_DEFINITION_REFERENCE = "SparkJobDefinitionReference" + +class SparkJobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The job type. """ - spark_batch = "SparkBatch" - spark_session = "SparkSession" + SPARK_BATCH = "SparkBatch" + SPARK_SESSION = "SparkSession" -class SparkServerType(str, Enum): +class SparkServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of Spark server. """ - shark_server = "SharkServer" - shark_server2 = "SharkServer2" - spark_thrift_server = "SparkThriftServer" + SHARK_SERVER = "SharkServer" + SHARK_SERVER2 = "SharkServer2" + SPARK_THRIFT_SERVER = "SparkThriftServer" -class SparkThriftTransportProtocol(str, Enum): +class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. """ - binary = "Binary" - sasl = "SASL" - http = "HTTP " + BINARY = "Binary" + SASL = "SASL" + HTTP = "HTTP " -class SqlConnectionType(str, Enum): +class SqlConnectionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of the connection. """ - sql_on_demand = "SqlOnDemand" - sql_pool = "SqlPool" + SQL_ON_DEMAND = "SqlOnDemand" + SQL_POOL = "SqlPool" + +class SqlPoolReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """SQL pool reference type. 
+ """ + + SQL_POOL_REFERENCE = "SqlPoolReference" + +class SqlScriptType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of the SQL script. + """ + + SQL_QUERY = "SqlQuery" + +class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of SSIS log location. + """ + + FILE = "File" -class SsisPackageLocationType(str, Enum): +class SsisPackageLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS package location. """ - ssisdb = "SSISDB" - file = "File" - inline_package = "InlinePackage" + SSISDB = "SSISDB" + FILE = "File" + INLINE_PACKAGE = "InlinePackage" -class StoredProcedureParameterType(str, Enum): +class StoredProcedureParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Stored procedure parameter type. """ - string = "String" - int = "Int" - int64 = "Int64" - decimal = "Decimal" - guid = "Guid" - boolean = "Boolean" - date = "Date" + STRING = "String" + INT = "Int" + INT64 = "Int64" + DECIMAL = "Decimal" + GUID = "Guid" + BOOLEAN = "Boolean" + DATE = "Date" -class SybaseAuthenticationType(str, Enum): +class SybaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class TeradataAuthenticationType(str, Enum): +class TeradataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class TriggerRunStatus(str, Enum): +class TriggerRunStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Trigger run status. """ - succeeded = "Succeeded" - failed = "Failed" - inprogress = "Inprogress" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + INPROGRESS = "Inprogress" -class TriggerRuntimeState(str, Enum): +class TriggerRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible state of Triggers. """ - started = "Started" - stopped = "Stopped" - disabled = "Disabled" + STARTED = "Started" + STOPPED = "Stopped" + DISABLED = "Disabled" -class VariableType(str, Enum): +class Type(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Linked service reference type. + """ + + LINKED_SERVICE_REFERENCE = "LinkedServiceReference" + +class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Variable type. """ - string = "String" - bool = "Bool" - boolean = "Boolean" - array = "Array" + STRING = "String" + BOOL = "Bool" + BOOLEAN = "Boolean" + ARRAY = "Array" -class WebActivityMethod(str, Enum): +class WebActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The list of HTTP methods supported by a WebActivity. """ - get = "GET" - post = "POST" - put = "PUT" - delete = "DELETE" + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" -class WebAuthenticationType(str, Enum): +class WebAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the web table source. """ - basic = "Basic" - anonymous = "Anonymous" - client_certificate = "ClientCertificate" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + CLIENT_CERTIFICATE = "ClientCertificate" + +class WebHookActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The list of HTTP methods supported by a WebHook activity. 
+ """ + + POST = "POST" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py index 5d05dbaa3302..6bb2ecb9828a 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py @@ -14,7 +14,7 @@ class Activity(msrest.serialization.Model): """A pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SynapseSparkJobDefinitionActivity, SqlPoolStoredProcedureActivity, SwitchActivity, SynapseNotebookActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. All required parameters must be populated in order to send to Azure. @@ -48,7 +48,7 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'SparkJob': 'SynapseSparkJobDefinitionActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity', 'Switch': 'SwitchActivity', 'SynapseNotebook': 'SynapseNotebookActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} } def __init__( @@ -58,7 +58,7 @@ def __init__( super(Activity, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.name = kwargs['name'] - self.type = 'Activity' + self.type = 'Activity' # type: str self.description = kwargs.get('description', None) self.depends_on = kwargs.get('depends_on', None) self.user_properties = kwargs.get('user_properties', None) @@ -334,7 +334,7 @@ def __init__( ): super(LinkedService, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'LinkedService' + self.type = 'LinkedService' # type: str self.connect_via = kwargs.get('connect_via', None) self.description = kwargs.get('description', None) self.parameters = kwargs.get('parameters', None) @@ -422,7 +422,7 @@ def __init__( **kwargs ): super(AmazonMWSLinkedService, self).__init__(**kwargs) - self.type = 'AmazonMWS' + self.type = 'AmazonMWS' # type: str self.endpoint = kwargs['endpoint'] self.marketplace_id = kwargs['marketplace_id'] self.seller_id = kwargs['seller_id'] @@ -494,7 +494,7 @@ def __init__( ): super(Dataset, self).__init__(**kwargs) self.additional_properties = 
kwargs.get('additional_properties', None) - self.type = 'Dataset' + self.type = 'Dataset' # type: str self.description = kwargs.get('description', None) self.structure = kwargs.get('structure', None) self.schema = kwargs.get('schema', None) @@ -558,7 +558,7 @@ def __init__( **kwargs ): super(AmazonMWSObjectDataset, self).__init__(**kwargs) - self.type = 'AmazonMWSObject' + self.type = 'AmazonMWSObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -626,7 +626,7 @@ def __init__( **kwargs ): super(AmazonRedshiftLinkedService, self).__init__(**kwargs) - self.type = 'AmazonRedshift' + self.type = 'AmazonRedshift' # type: str self.server = kwargs['server'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -698,7 +698,7 @@ def __init__( **kwargs ): super(AmazonRedshiftTableDataset, self).__init__(**kwargs) - self.type = 'AmazonRedshiftTable' + self.type = 'AmazonRedshiftTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -760,7 +760,7 @@ def __init__( **kwargs ): super(AmazonS3LinkedService, self).__init__(**kwargs) - self.type = 'AmazonS3' + self.type = 'AmazonS3' # type: str self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) @@ -812,7 +812,7 @@ def __init__( **kwargs ): super(AppendVariableActivity, self).__init__(**kwargs) - self.type = 'AppendVariable' + self.type = 'AppendVariable' # type: str self.variable_name = kwargs.get('variable_name', None) self.value = kwargs.get('value', None) @@ -879,7 +879,7 @@ def __init__( **kwargs ): super(AvroDataset, self).__init__(**kwargs) - self.type = 'Avro' + self.type = 'Avro' # type: str self.location = kwargs.get('location', None) self.avro_compression_codec = kwargs.get('avro_compression_codec', None) self.avro_compression_level = kwargs.get('avro_compression_level', None) @@ -950,7 +950,7 @@ def __init__( **kwargs ): super(AzureBatchLinkedService, self).__init__(**kwargs) - self.type = 'AzureBatch' + self.type = 'AzureBatch' # type: str self.account_name = kwargs['account_name'] self.access_key = kwargs.get('access_key', None) self.batch_uri = kwargs['batch_uri'] @@ -1023,7 +1023,7 @@ def __init__( **kwargs ): super(AzureBlobFSLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobFS' + self.type = 'AzureBlobFS' # type: str self.url = kwargs['url'] self.account_key = kwargs.get('account_key', None) self.service_principal_id = kwargs.get('service_principal_id', None) @@ -1106,7 +1106,7 @@ def __init__( **kwargs ): super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobStorage' + self.type = 'AzureBlobStorage' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_key = kwargs.get('account_key', None) self.sas_uri = kwargs.get('sas_uri', None) @@ -1225,7 +1225,7 @@ def __init__( **kwargs ): super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricks' + self.type = 'AzureDatabricks' # type: str self.domain = kwargs['domain'] self.access_token = kwargs['access_token'] self.existing_cluster_id = kwargs.get('existing_cluster_id', None) @@ -1294,7 +1294,7 @@ def __init__( **kwargs ): super(ExecutionActivity, self).__init__(**kwargs) - self.type = 'Execution' + self.type = 'Execution' # type: str self.linked_service_name = 
kwargs.get('linked_service_name', None) self.policy = kwargs.get('policy', None) @@ -1353,7 +1353,7 @@ def __init__( **kwargs ): super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.type = 'AzureDataExplorerCommand' + self.type = 'AzureDataExplorerCommand' # type: str self.command = kwargs['command'] self.command_timeout = kwargs.get('command_timeout', None) @@ -1422,7 +1422,7 @@ def __init__( **kwargs ): super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataExplorer' + self.type = 'AzureDataExplorer' # type: str self.endpoint = kwargs['endpoint'] self.service_principal_id = kwargs['service_principal_id'] self.service_principal_key = kwargs['service_principal_key'] @@ -1485,7 +1485,7 @@ def __init__( **kwargs ): super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.type = 'AzureDataExplorerTable' + self.type = 'AzureDataExplorerTable' # type: str self.table = kwargs.get('table', None) @@ -1562,7 +1562,7 @@ def __init__( **kwargs ): super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeAnalytics' + self.type = 'AzureDataLakeAnalytics' # type: str self.account_name = kwargs['account_name'] self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) @@ -1645,7 +1645,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeStore' + self.type = 'AzureDataLakeStore' # type: str self.data_lake_store_uri = kwargs['data_lake_store_uri'] self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) @@ -1711,7 +1711,7 @@ def __init__( **kwargs ): super(AzureFileStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureFileStorage' + self.type = 'AzureFileStorage' # type: str self.host = kwargs['host'] self.user_id = kwargs.get('user_id', None) self.password = kwargs.get('password', None) @@ -1782,7 +1782,7 @@ def __init__( **kwargs ): super(AzureFunctionActivity, self).__init__(**kwargs) - self.type = 'AzureFunctionActivity' + self.type = 'AzureFunctionActivity' # type: str self.method = kwargs['method'] self.function_name = kwargs['function_name'] self.headers = kwargs.get('headers', None) @@ -1840,7 +1840,7 @@ def __init__( **kwargs ): super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.type = 'AzureFunction' + self.type = 'AzureFunction' # type: str self.function_app_url = kwargs['function_app_url'] self.function_key = kwargs.get('function_key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -1889,7 +1889,7 @@ def __init__( **kwargs ): super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.type = 'AzureKeyVault' + self.type = 'AzureKeyVault' # type: str self.base_url = kwargs['base_url'] @@ -1922,7 +1922,7 @@ def __init__( **kwargs ): super(SecretBase, self).__init__(**kwargs) - self.type = None + self.type = None # type: Optional[str] class AzureKeyVaultSecretReference(SecretBase): @@ -1960,7 +1960,7 @@ def __init__( **kwargs ): super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type = 'AzureKeyVaultSecret' + self.type = 'AzureKeyVaultSecret' # type: str self.store = kwargs['store'] self.secret_name = kwargs['secret_name'] self.secret_version = kwargs.get('secret_version', None) @@ -2016,7 +2016,7 @@ def __init__( **kwargs ): super(AzureMariaDBLinkedService, self).__init__(**kwargs) - 
self.type = 'AzureMariaDB' + self.type = 'AzureMariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2076,7 +2076,7 @@ def __init__( **kwargs ): super(AzureMariaDBTableDataset, self).__init__(**kwargs) - self.type = 'AzureMariaDBTable' + self.type = 'AzureMariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -2143,7 +2143,7 @@ def __init__( **kwargs ): super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) - self.type = 'AzureMLBatchExecution' + self.type = 'AzureMLBatchExecution' # type: str self.global_parameters = kwargs.get('global_parameters', None) self.web_service_outputs = kwargs.get('web_service_outputs', None) self.web_service_inputs = kwargs.get('web_service_inputs', None) @@ -2221,7 +2221,7 @@ def __init__( **kwargs ): super(AzureMLExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'AzureMLExecutePipeline' + self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = kwargs['ml_pipeline_id'] self.experiment_name = kwargs.get('experiment_name', None) self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) @@ -2298,7 +2298,7 @@ def __init__( **kwargs ): super(AzureMLLinkedService, self).__init__(**kwargs) - self.type = 'AzureML' + self.type = 'AzureML' # type: str self.ml_endpoint = kwargs['ml_endpoint'] self.api_key = kwargs['api_key'] self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) @@ -2379,7 +2379,7 @@ def __init__( **kwargs ): super(AzureMLServiceLinkedService, self).__init__(**kwargs) - self.type = 'AzureMLService' + self.type = 'AzureMLService' # type: str self.subscription_id = kwargs['subscription_id'] self.resource_group_name = kwargs['resource_group_name'] self.ml_workspace_name = kwargs['ml_workspace_name'] @@ -2450,7 +2450,7 @@ def __init__( **kwargs ): super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) - self.type = 'AzureMLUpdateResource' + self.type = 'AzureMLUpdateResource' # type: str self.trained_model_name = kwargs['trained_model_name'] self.trained_model_linked_service_name = kwargs['trained_model_linked_service_name'] self.trained_model_file_path = kwargs['trained_model_file_path'] @@ -2539,7 +2539,7 @@ def __init__( **kwargs ): super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.type = 'AzureMySql' + self.type = 'AzureMySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2604,7 +2604,7 @@ def __init__( **kwargs ): super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureMySqlTable' + self.type = 'AzureMySqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) @@ -2659,7 +2659,7 @@ def __init__( **kwargs ): super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'AzurePostgreSql' + self.type = 'AzurePostgreSql' # type: str self.connection_string = kwargs.get('connection_string', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2728,7 +2728,7 @@ def __init__( **kwargs ): super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlTable' + self.type = 'AzurePostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) 
self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -2790,7 +2790,7 @@ def __init__( **kwargs ): super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.type = 'AzureSearchIndex' + self.type = 'AzureSearchIndex' # type: str self.index_name = kwargs['index_name'] @@ -2845,7 +2845,7 @@ def __init__( **kwargs ): super(AzureSearchLinkedService, self).__init__(**kwargs) - self.type = 'AzureSearch' + self.type = 'AzureSearch' # type: str self.url = kwargs['url'] self.key = kwargs.get('key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2914,7 +2914,7 @@ def __init__( **kwargs ): super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDatabase' + self.type = 'AzureSqlDatabase' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.service_principal_id = kwargs.get('service_principal_id', None) @@ -2986,7 +2986,7 @@ def __init__( **kwargs ): super(AzureSqlDWLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDW' + self.type = 'AzureSqlDW' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.service_principal_id = kwargs.get('service_principal_id', None) @@ -3058,7 +3058,7 @@ def __init__( **kwargs ): super(AzureSqlDWTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlDWTable' + self.type = 'AzureSqlDWTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -3127,7 +3127,7 @@ def __init__( **kwargs ): super(AzureSqlMILinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlMI' + self.type = 'AzureSqlMI' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.service_principal_id = kwargs.get('service_principal_id', None) @@ -3199,7 +3199,7 @@ def __init__( **kwargs ): super(AzureSqlMITableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlMITable' + self.type = 'AzureSqlMITable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -3268,7 +3268,7 @@ def __init__( **kwargs ): super(AzureSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlTable' + self.type = 'AzureSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -3331,7 +3331,7 @@ def __init__( **kwargs ): super(AzureStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureStorage' + self.type = 'AzureStorage' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_key = kwargs.get('account_key', None) self.sas_uri = kwargs.get('sas_uri', None) @@ -3395,7 +3395,7 @@ def __init__( **kwargs ): super(AzureTableDataset, self).__init__(**kwargs) - self.type = 'AzureTable' + self.type = 'AzureTable' # type: str self.table_name = kwargs['table_name'] @@ -3456,7 +3456,7 @@ def __init__( **kwargs ): super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureTableStorage' + self.type = 'AzureTableStorage' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_key = kwargs.get('account_key', None) self.sas_uri = 
kwargs.get('sas_uri', None) @@ -3467,18 +3467,17 @@ def __init__( class BigDataPoolReference(msrest.serialization.Model): """Big data pool reference. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Big data pool reference type. Default value: "BigDataPoolReference". - :vartype type: str + :param type: Required. Big data pool reference type. Possible values include: + "BigDataPoolReference". + :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType :param reference_name: Required. Reference big data pool name. :type reference_name: str """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -3487,13 +3486,12 @@ class BigDataPoolReference(msrest.serialization.Model): 'reference_name': {'key': 'referenceName', 'type': 'str'}, } - type = "BigDataPoolReference" - def __init__( self, **kwargs ): super(BigDataPoolReference, self).__init__(**kwargs) + self.type = kwargs['type'] self.reference_name = kwargs['reference_name'] @@ -3554,7 +3552,7 @@ def __init__( **kwargs ): super(BinaryDataset, self).__init__(**kwargs) - self.type = 'Binary' + self.type = 'Binary' # type: str self.location = kwargs.get('location', None) self.compression = kwargs.get('compression', None) @@ -3622,7 +3620,7 @@ def __init__( **kwargs ): super(CassandraLinkedService, self).__init__(**kwargs) - self.type = 'Cassandra' + self.type = 'Cassandra' # type: str self.host = kwargs['host'] self.authentication_type = kwargs.get('authentication_type', None) self.port = kwargs.get('port', None) @@ -3690,7 +3688,7 @@ def __init__( **kwargs ): super(CassandraTableDataset, self).__init__(**kwargs) - self.type = 'CassandraTable' + self.type = 'CassandraTable' # type: str self.table_name = kwargs.get('table_name', None) self.keyspace = kwargs.get('keyspace', None) @@ -3788,7 +3786,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsEntity' + self.type = 'CommonDataServiceForAppsEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -3896,7 +3894,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) - self.type = 'CommonDataServiceForApps' + self.type = 'CommonDataServiceForApps' # type: str self.deployment_type = kwargs['deployment_type'] self.host_name = kwargs.get('host_name', None) self.port = kwargs.get('port', None) @@ -3979,7 +3977,7 @@ def __init__( **kwargs ): super(ConcurLinkedService, self).__init__(**kwargs) - self.type = 'Concur' + self.type = 'Concur' # type: str self.client_id = kwargs['client_id'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -4043,7 +4041,7 @@ def __init__( **kwargs ): super(ConcurObjectDataset, self).__init__(**kwargs) - self.type = 'ConcurObject' + self.type = 'ConcurObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -4086,7 +4084,7 @@ def __init__( **kwargs ): super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' + self.type = 'Container' # type: str class CopyActivity(ExecutionActivity): @@ -4182,7 +4180,7 @@ def __init__( **kwargs ): super(CopyActivity, self).__init__(**kwargs) - self.type = 'Copy' + self.type = 'Copy' # type: str self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.source = 
kwargs['source'] @@ -4252,7 +4250,7 @@ def __init__( ): super(CopySink, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySink' + self.type = 'CopySink' # type: str self.write_batch_size = kwargs.get('write_batch_size', None) self.write_batch_timeout = kwargs.get('write_batch_timeout', None) self.sink_retry_count = kwargs.get('sink_retry_count', None) @@ -4306,7 +4304,7 @@ def __init__( ): super(CopySource, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySource' + self.type = 'CopySource' # type: str self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) @@ -4370,7 +4368,7 @@ def __init__( **kwargs ): super(CosmosDbLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDb' + self.type = 'CosmosDb' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_endpoint = kwargs.get('account_endpoint', None) self.database = kwargs.get('database', None) @@ -4434,7 +4432,7 @@ def __init__( **kwargs ): super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiCollection' + self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = kwargs['collection'] @@ -4487,7 +4485,7 @@ def __init__( **kwargs ): super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApi' + self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] @@ -4548,7 +4546,7 @@ def __init__( **kwargs ): super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiCollection' + self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -4602,7 +4600,7 @@ def __init__( **kwargs ): super(CouchbaseLinkedService, self).__init__(**kwargs) - self.type = 'Couchbase' + self.type = 'Couchbase' # type: str self.connection_string = kwargs.get('connection_string', None) self.cred_string = kwargs.get('cred_string', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -4662,7 +4660,7 @@ def __init__( **kwargs ): super(CouchbaseTableDataset, self).__init__(**kwargs) - self.type = 'CouchbaseTable' + self.type = 'CouchbaseTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -4818,7 +4816,7 @@ def __init__( **kwargs ): super(CustomActivity, self).__init__(**kwargs) - self.type = 'Custom' + self.type = 'Custom' # type: str self.command = kwargs['command'] self.resource_linked_service = kwargs.get('resource_linked_service', None) self.folder_path = kwargs.get('folder_path', None) @@ -4892,7 +4890,7 @@ def __init__( **kwargs ): super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type = 'CustomDataSource' + self.type = 'CustomDataSource' # type: str self.type_properties = kwargs['type_properties'] @@ -4954,7 +4952,7 @@ def __init__( **kwargs ): super(DatabricksNotebookActivity, self).__init__(**kwargs) - self.type = 'DatabricksNotebook' + self.type = 'DatabricksNotebook' # type: str self.notebook_path = kwargs['notebook_path'] self.base_parameters = kwargs.get('base_parameters', None) self.libraries = kwargs.get('libraries', None) @@ -5017,7 +5015,7 @@ def __init__( **kwargs ): super(DatabricksSparkJarActivity, self).__init__(**kwargs) - 
self.type = 'DatabricksSparkJar' + self.type = 'DatabricksSparkJar' # type: str self.main_class_name = kwargs['main_class_name'] self.parameters = kwargs.get('parameters', None) self.libraries = kwargs.get('libraries', None) @@ -5079,7 +5077,7 @@ def __init__( **kwargs ): super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkPython' + self.type = 'DatabricksSparkPython' # type: str self.python_file = kwargs['python_file'] self.parameters = kwargs.get('parameters', None) self.libraries = kwargs.get('libraries', None) @@ -5124,7 +5122,7 @@ def __init__( **kwargs ): super(DataFlow, self).__init__(**kwargs) - self.type = None + self.type = None # type: Optional[str] self.description = kwargs.get('description', None) self.annotations = kwargs.get('annotations', None) self.folder = kwargs.get('folder', None) @@ -5520,15 +5518,13 @@ def __init__( class DataFlowReference(msrest.serialization.Model): """Data flow reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :ivar type: Required. Data flow reference type. Default value: "DataFlowReference". - :vartype type: str + :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". + :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType :param reference_name: Required. Reference data flow name. :type reference_name: str :param dataset_parameters: Reference data flow parameters from dataset. @@ -5536,7 +5532,7 @@ class DataFlowReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -5547,14 +5543,13 @@ class DataFlowReference(msrest.serialization.Model): 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } - type = "DataFlowReference" - def __init__( self, **kwargs ): super(DataFlowReference, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs['type'] self.reference_name = kwargs['reference_name'] self.dataset_parameters = kwargs.get('dataset_parameters', None) @@ -5858,7 +5853,7 @@ def __init__( **kwargs ): super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) - self.type = 'DataLakeAnalyticsU-SQL' + self.type = 'DataLakeAnalyticsU-SQL' # type: str self.script_path = kwargs['script_path'] self.script_linked_service = kwargs['script_linked_service'] self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) @@ -5902,7 +5897,7 @@ def __init__( ): super(DatasetCompression, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetCompression' + self.type = 'DatasetCompression' # type: str class DatasetBZip2Compression(DatasetCompression): @@ -5931,7 +5926,7 @@ def __init__( **kwargs ): super(DatasetBZip2Compression, self).__init__(**kwargs) - self.type = 'BZip2' + self.type = 'BZip2' # type: str class DatasetDebugResource(SubResourceDebugResource): @@ -5991,7 +5986,7 @@ def __init__( **kwargs ): super(DatasetDeflateCompression, self).__init__(**kwargs) - self.type = 'Deflate' + self.type = 'Deflate' # type: str self.level = kwargs.get('level', None) @@ -6043,7 +6038,7 @@ def __init__( **kwargs ): 
super(DatasetGZipCompression, self).__init__(**kwargs) - self.type = 'GZip' + self.type = 'GZip' # type: str self.level = kwargs.get('level', None) @@ -6118,7 +6113,7 @@ def __init__( ): super(DatasetLocation, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetLocation' + self.type = 'DatasetLocation' # type: str self.folder_path = kwargs.get('folder_path', None) self.file_name = kwargs.get('file_name', None) @@ -6126,12 +6121,10 @@ def __init__( class DatasetReference(msrest.serialization.Model): """Dataset reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Dataset reference type. Default value: "DatasetReference". - :vartype type: str + :param type: Required. Dataset reference type. Possible values include: "DatasetReference". + :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType :param reference_name: Required. Reference dataset name. :type reference_name: str :param parameters: Arguments for dataset. @@ -6139,7 +6132,7 @@ class DatasetReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -6149,13 +6142,12 @@ class DatasetReference(msrest.serialization.Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "DatasetReference" - def __init__( self, **kwargs ): super(DatasetReference, self).__init__(**kwargs) + self.type = kwargs['type'] self.reference_name = kwargs['reference_name'] self.parameters = kwargs.get('parameters', None) @@ -6232,15 +6224,13 @@ def __init__( **kwargs ): super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.type = 'ZipDeflate' + self.type = 'ZipDeflate' # type: str self.level = kwargs.get('level', None) class Db2LinkedService(LinkedService): """Linked service for DB2 data source. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -6262,9 +6252,9 @@ class Db2LinkedService(LinkedService): :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object - :ivar authentication_type: AuthenticationType to be used for connection. Default value: - "Basic". - :vartype authentication_type: str + :param authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic". + :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). 
:type username: object @@ -6286,7 +6276,6 @@ class Db2LinkedService(LinkedService): 'type': {'required': True}, 'server': {'required': True}, 'database': {'required': True}, - 'authentication_type': {'constant': True}, } _attribute_map = { @@ -6306,16 +6295,15 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - authentication_type = "Basic" - def __init__( self, **kwargs ): super(Db2LinkedService, self).__init__(**kwargs) - self.type = 'Db2' + self.type = 'Db2' # type: str self.server = kwargs['server'] self.database = kwargs['database'] + self.authentication_type = kwargs.get('authentication_type', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.package_collection = kwargs.get('package_collection', None) @@ -6385,7 +6373,7 @@ def __init__( **kwargs ): super(Db2TableDataset, self).__init__(**kwargs) - self.type = 'Db2Table' + self.type = 'Db2Table' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -6457,7 +6445,7 @@ def __init__( **kwargs ): super(DeleteActivity, self).__init__(**kwargs) - self.type = 'Delete' + self.type = 'Delete' # type: str self.recursive = kwargs.get('recursive', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) self.enable_logging = kwargs.get('enable_logging', None) @@ -6578,7 +6566,7 @@ def __init__( **kwargs ): super(DelimitedTextDataset, self).__init__(**kwargs) - self.type = 'DelimitedText' + self.type = 'DelimitedText' # type: str self.location = kwargs.get('location', None) self.column_delimiter = kwargs.get('column_delimiter', None) self.row_delimiter = kwargs.get('row_delimiter', None) @@ -6647,7 +6635,7 @@ def __init__( **kwargs ): super(DocumentDbCollectionDataset, self).__init__(**kwargs) - self.type = 'DocumentDbCollection' + self.type = 'DocumentDbCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -6701,7 +6689,7 @@ def __init__( **kwargs ): super(DrillLinkedService, self).__init__(**kwargs) - self.type = 'Drill' + self.type = 'Drill' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -6769,7 +6757,7 @@ def __init__( **kwargs ): super(DrillTableDataset, self).__init__(**kwargs) - self.type = 'DrillTable' + self.type = 'DrillTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -6845,7 +6833,7 @@ def __init__( **kwargs ): super(DynamicsAXLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsAX' + self.type = 'DynamicsAX' # type: str self.url = kwargs['url'] self.service_principal_id = kwargs['service_principal_id'] self.service_principal_key = kwargs['service_principal_key'] @@ -6910,7 +6898,7 @@ def __init__( **kwargs ): super(DynamicsAXResourceDataset, self).__init__(**kwargs) - self.type = 'DynamicsAXResource' + self.type = 'DynamicsAXResource' # type: str self.path = kwargs['path'] @@ -6969,7 +6957,7 @@ def __init__( **kwargs ): super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsCrmEntity' + self.type = 'DynamicsCrmEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -7075,7 +7063,7 @@ def 
__init__( **kwargs ): super(DynamicsCrmLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsCrm' + self.type = 'DynamicsCrm' # type: str self.deployment_type = kwargs['deployment_type'] self.host_name = kwargs.get('host_name', None) self.port = kwargs.get('port', None) @@ -7145,7 +7133,7 @@ def __init__( **kwargs ): super(DynamicsEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsEntity' + self.type = 'DynamicsEntity' # type: str self.entity_name = kwargs.get('entity_name', None) @@ -7248,7 +7236,7 @@ def __init__( **kwargs ): super(DynamicsLinkedService, self).__init__(**kwargs) - self.type = 'Dynamics' + self.type = 'Dynamics' # type: str self.deployment_type = kwargs['deployment_type'] self.host_name = kwargs.get('host_name', None) self.port = kwargs.get('port', None) @@ -7331,7 +7319,7 @@ def __init__( **kwargs ): super(EloquaLinkedService, self).__init__(**kwargs) - self.type = 'Eloqua' + self.type = 'Eloqua' # type: str self.endpoint = kwargs['endpoint'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -7395,7 +7383,7 @@ def __init__( **kwargs ): super(EloquaObjectDataset, self).__init__(**kwargs) - self.type = 'EloquaObject' + self.type = 'EloquaObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -7492,7 +7480,7 @@ def __init__( **kwargs ): super(ExecuteDataFlowActivity, self).__init__(**kwargs) - self.type = 'ExecuteDataFlow' + self.type = 'ExecuteDataFlow' # type: str self.data_flow = kwargs['data_flow'] self.staging = kwargs.get('staging', None) self.integration_runtime = kwargs.get('integration_runtime', None) @@ -7574,7 +7562,7 @@ def __init__( **kwargs ): super(ExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'ExecutePipeline' + self.type = 'ExecutePipeline' # type: str self.pipeline = kwargs['pipeline'] self.parameters = kwargs.get('parameters', None) self.wait_on_completion = kwargs.get('wait_on_completion', None) @@ -7668,7 +7656,7 @@ def __init__( **kwargs ): super(ExecuteSSISPackageActivity, self).__init__(**kwargs) - self.type = 'ExecuteSSISPackage' + self.type = 'ExecuteSSISPackage' # type: str self.package_location = kwargs['package_location'] self.runtime = kwargs.get('runtime', None) self.logging_level = kwargs.get('logging_level', None) @@ -7739,18 +7727,16 @@ def __init__( class Expression(msrest.serialization.Model): """Azure Synapse expression definition. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Expression type. Default value: "Expression". - :vartype type: str + :param type: Required. Expression type. Possible values include: "Expression". + :type type: str or ~azure.synapse.artifacts.models.ExpressionType :param value: Required. Expression value. 
:type value: str """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'value': {'required': True}, } @@ -7759,13 +7745,12 @@ class Expression(msrest.serialization.Model): 'value': {'key': 'value', 'type': 'str'}, } - type = "Expression" - def __init__( self, **kwargs ): super(Expression, self).__init__(**kwargs) + self.type = kwargs['type'] self.value = kwargs['value'] @@ -7824,7 +7809,7 @@ def __init__( **kwargs ): super(FileServerLinkedService, self).__init__(**kwargs) - self.type = 'FileServer' + self.type = 'FileServer' # type: str self.host = kwargs['host'] self.user_id = kwargs.get('user_id', None) self.password = kwargs.get('password', None) @@ -7878,7 +7863,7 @@ def __init__( **kwargs ): super(FilterActivity, self).__init__(**kwargs) - self.type = 'Filter' + self.type = 'Filter' # type: str self.items = kwargs['items'] self.condition = kwargs['condition'] @@ -7938,7 +7923,7 @@ def __init__( **kwargs ): super(ForEachActivity, self).__init__(**kwargs) - self.type = 'ForEach' + self.type = 'ForEach' # type: str self.is_sequential = kwargs.get('is_sequential', None) self.batch_count = kwargs.get('batch_count', None) self.items = kwargs['items'] @@ -8017,7 +8002,7 @@ def __init__( **kwargs ): super(FtpServerLinkedService, self).__init__(**kwargs) - self.type = 'FtpServer' + self.type = 'FtpServer' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -8080,7 +8065,7 @@ def __init__( **kwargs ): super(GetMetadataActivity, self).__init__(**kwargs) - self.type = 'GetMetadata' + self.type = 'GetMetadata' # type: str self.dataset = kwargs['dataset'] self.field_list = kwargs.get('field_list', None) @@ -8193,7 +8178,7 @@ def __init__( **kwargs ): super(GoogleAdWordsLinkedService, self).__init__(**kwargs) - self.type = 'GoogleAdWords' + self.type = 'GoogleAdWords' # type: str self.client_customer_id = kwargs['client_customer_id'] self.developer_token = kwargs['developer_token'] self.authentication_type = kwargs['authentication_type'] @@ -8261,7 +8246,7 @@ def __init__( **kwargs ): super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleAdWordsObject' + self.type = 'GoogleAdWordsObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -8356,7 +8341,7 @@ def __init__( **kwargs ): super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.type = 'GoogleBigQuery' + self.type = 'GoogleBigQuery' # type: str self.project = kwargs['project'] self.additional_projects = kwargs.get('additional_projects', None) self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) @@ -8434,7 +8419,7 @@ def __init__( **kwargs ): super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleBigQueryObject' + self.type = 'GoogleBigQueryObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.dataset = kwargs.get('dataset', None) @@ -8497,7 +8482,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) - self.type = 'GoogleCloudStorage' + self.type = 'GoogleCloudStorage' # type: str self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) @@ -8554,7 +8539,7 @@ def __init__( **kwargs ): super(GreenplumLinkedService, self).__init__(**kwargs) - self.type = 'Greenplum' + self.type = 'Greenplum' # type: 
str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -8622,7 +8607,7 @@ def __init__( **kwargs ): super(GreenplumTableDataset, self).__init__(**kwargs) - self.type = 'GreenplumTable' + self.type = 'GreenplumTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -8711,7 +8696,7 @@ def __init__( **kwargs ): super(HBaseLinkedService, self).__init__(**kwargs) - self.type = 'HBase' + self.type = 'HBase' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.http_path = kwargs.get('http_path', None) @@ -8779,7 +8764,7 @@ def __init__( **kwargs ): super(HBaseObjectDataset, self).__init__(**kwargs) - self.type = 'HBaseObject' + self.type = 'HBaseObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -8842,7 +8827,7 @@ def __init__( **kwargs ): super(HdfsLinkedService, self).__init__(**kwargs) - self.type = 'Hdfs' + self.type = 'Hdfs' # type: str self.url = kwargs['url'] self.authentication_type = kwargs.get('authentication_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -8920,7 +8905,7 @@ def __init__( **kwargs ): super(HDInsightHiveActivity, self).__init__(**kwargs) - self.type = 'HDInsightHive' + self.type = 'HDInsightHive' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -9001,7 +8986,7 @@ def __init__( **kwargs ): super(HDInsightLinkedService, self).__init__(**kwargs) - self.type = 'HDInsight' + self.type = 'HDInsight' # type: str self.cluster_uri = kwargs['cluster_uri'] self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) @@ -9083,7 +9068,7 @@ def __init__( **kwargs ): super(HDInsightMapReduceActivity, self).__init__(**kwargs) - self.type = 'HDInsightMapReduce' + self.type = 'HDInsightMapReduce' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -9272,7 +9257,7 @@ def __init__( **kwargs ): super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) - self.type = 'HDInsightOnDemand' + self.type = 'HDInsightOnDemand' # type: str self.cluster_size = kwargs['cluster_size'] self.time_to_live = kwargs['time_to_live'] self.version = kwargs['version'] @@ -9372,7 +9357,7 @@ def __init__( **kwargs ): super(HDInsightPigActivity, self).__init__(**kwargs) - self.type = 'HDInsightPig' + self.type = 'HDInsightPig' # type: str self.storage_linked_services = kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -9456,7 +9441,7 @@ def __init__( **kwargs ): super(HDInsightSparkActivity, self).__init__(**kwargs) - self.type = 'HDInsightSpark' + self.type = 'HDInsightSpark' # type: str self.root_path = kwargs['root_path'] self.entry_file_path = kwargs['entry_file_path'] self.arguments = kwargs.get('arguments', None) @@ -9555,7 +9540,7 @@ def __init__( **kwargs ): super(HDInsightStreamingActivity, self).__init__(**kwargs) - self.type = 'HDInsightStreaming' + self.type = 'HDInsightStreaming' # type: str self.storage_linked_services = 
kwargs.get('storage_linked_services', None) self.arguments = kwargs.get('arguments', None) self.get_debug_info = kwargs.get('get_debug_info', None) @@ -9678,7 +9663,7 @@ def __init__( **kwargs ): super(HiveLinkedService, self).__init__(**kwargs) - self.type = 'Hive' + self.type = 'Hive' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.server_type = kwargs.get('server_type', None) @@ -9760,7 +9745,7 @@ def __init__( **kwargs ): super(HiveObjectDataset, self).__init__(**kwargs) - self.type = 'HiveObject' + self.type = 'HiveObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -9842,7 +9827,7 @@ def __init__( **kwargs ): super(HttpLinkedService, self).__init__(**kwargs) - self.type = 'HttpServer' + self.type = 'HttpServer' # type: str self.url = kwargs['url'] self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) @@ -9924,7 +9909,7 @@ def __init__( **kwargs ): super(HubspotLinkedService, self).__init__(**kwargs) - self.type = 'Hubspot' + self.type = 'Hubspot' # type: str self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) self.access_token = kwargs.get('access_token', None) @@ -9989,7 +9974,7 @@ def __init__( **kwargs ): super(HubspotObjectDataset, self).__init__(**kwargs) - self.type = 'HubspotObject' + self.type = 'HubspotObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -10046,7 +10031,7 @@ def __init__( **kwargs ): super(IfConditionActivity, self).__init__(**kwargs) - self.type = 'IfCondition' + self.type = 'IfCondition' # type: str self.expression = kwargs['expression'] self.if_true_activities = kwargs.get('if_true_activities', None) self.if_false_activities = kwargs.get('if_false_activities', None) @@ -10137,7 +10122,7 @@ def __init__( **kwargs ): super(ImpalaLinkedService, self).__init__(**kwargs) - self.type = 'Impala' + self.type = 'Impala' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.authentication_type = kwargs['authentication_type'] @@ -10214,7 +10199,7 @@ def __init__( **kwargs ): super(ImpalaObjectDataset, self).__init__(**kwargs) - self.type = 'ImpalaObject' + self.type = 'ImpalaObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -10285,7 +10270,7 @@ def __init__( **kwargs ): super(InformixLinkedService, self).__init__(**kwargs) - self.type = 'Informix' + self.type = 'Informix' # type: str self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) self.credential = kwargs.get('credential', None) @@ -10349,20 +10334,18 @@ def __init__( **kwargs ): super(InformixTableDataset, self).__init__(**kwargs) - self.type = 'InformixTable' + self.type = 'InformixTable' # type: str self.table_name = kwargs.get('table_name', None) class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of integration runtime. Default value: + :param type: Required. Type of integration runtime. 
Possible values include: "IntegrationRuntimeReference". - :vartype type: str + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType :param reference_name: Required. Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. @@ -10370,7 +10353,7 @@ class IntegrationRuntimeReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -10380,13 +10363,12 @@ class IntegrationRuntimeReference(msrest.serialization.Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "IntegrationRuntimeReference" - def __init__( self, **kwargs ): super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.type = kwargs['type'] self.reference_name = kwargs['reference_name'] self.parameters = kwargs.get('parameters', None) @@ -10464,7 +10446,7 @@ def __init__( **kwargs ): super(JiraLinkedService, self).__init__(**kwargs) - self.type = 'Jira' + self.type = 'Jira' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.username = kwargs['username'] @@ -10529,7 +10511,7 @@ def __init__( **kwargs ): super(JiraObjectDataset, self).__init__(**kwargs) - self.type = 'JiraObject' + self.type = 'JiraObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -10597,7 +10579,7 @@ def __init__( **kwargs ): super(JsonDataset, self).__init__(**kwargs) - self.type = 'Json' + self.type = 'Json' # type: str self.location = kwargs.get('location', None) self.encoding_name = kwargs.get('encoding_name', None) self.compression = kwargs.get('compression', None) @@ -10663,12 +10645,11 @@ def __init__( class LinkedServiceReference(msrest.serialization.Model): """Linked service reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str + :param type: Required. Linked service reference type. Possible values include: + "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.Type :param reference_name: Required. Reference LinkedService name. :type reference_name: str :param parameters: Arguments for LinkedService. 
@@ -10676,7 +10657,7 @@ class LinkedServiceReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -10686,13 +10667,12 @@ class LinkedServiceReference(msrest.serialization.Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "LinkedServiceReference" - def __init__( self, **kwargs ): super(LinkedServiceReference, self).__init__(**kwargs) + self.type = kwargs['type'] self.reference_name = kwargs['reference_name'] self.parameters = kwargs.get('parameters', None) @@ -10832,7 +10812,7 @@ def __init__( **kwargs ): super(LookupActivity, self).__init__(**kwargs) - self.type = 'Lookup' + self.type = 'Lookup' # type: str self.source = kwargs['source'] self.dataset = kwargs['dataset'] self.first_row_only = kwargs.get('first_row_only', None) @@ -10901,7 +10881,7 @@ def __init__( **kwargs ): super(MagentoLinkedService, self).__init__(**kwargs) - self.type = 'Magento' + self.type = 'Magento' # type: str self.host = kwargs['host'] self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -10964,7 +10944,7 @@ def __init__( **kwargs ): super(MagentoObjectDataset, self).__init__(**kwargs) - self.type = 'MagentoObject' + self.type = 'MagentoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -11012,7 +10992,7 @@ def __init__( **kwargs ): super(MappingDataFlow, self).__init__(**kwargs) - self.type = 'MappingDataFlow' + self.type = 'MappingDataFlow' # type: str self.sources = kwargs.get('sources', None) self.sinks = kwargs.get('sinks', None) self.transformations = kwargs.get('transformations', None) @@ -11069,7 +11049,7 @@ def __init__( **kwargs ): super(MariaDBLinkedService, self).__init__(**kwargs) - self.type = 'MariaDB' + self.type = 'MariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -11129,7 +11109,7 @@ def __init__( **kwargs ): super(MariaDBTableDataset, self).__init__(**kwargs) - self.type = 'MariaDBTable' + self.type = 'MariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -11200,7 +11180,7 @@ def __init__( **kwargs ): super(MarketoLinkedService, self).__init__(**kwargs) - self.type = 'Marketo' + self.type = 'Marketo' # type: str self.endpoint = kwargs['endpoint'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -11264,7 +11244,7 @@ def __init__( **kwargs ): super(MarketoObjectDataset, self).__init__(**kwargs) - self.type = 'MarketoObject' + self.type = 'MarketoObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -11333,7 +11313,7 @@ def __init__( **kwargs ): super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.type = 'MicrosoftAccess' + self.type = 'MicrosoftAccess' # type: str self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) self.credential = kwargs.get('credential', None) @@ -11397,7 +11377,7 @@ def __init__( **kwargs ): super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.type = 'MicrosoftAccessTable' + self.type = 'MicrosoftAccessTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -11457,7 +11437,7 @@ def __init__( **kwargs ): super(MongoDbCollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbCollection' + 
self.type = 'MongoDbCollection' # type: str self.collection_name = kwargs['collection_name'] @@ -11541,7 +11521,7 @@ def __init__( **kwargs ): super(MongoDbLinkedService, self).__init__(**kwargs) - self.type = 'MongoDb' + self.type = 'MongoDb' # type: str self.server = kwargs['server'] self.authentication_type = kwargs.get('authentication_type', None) self.database_name = kwargs['database_name'] @@ -11610,7 +11590,7 @@ def __init__( **kwargs ): super(MongoDbV2CollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbV2Collection' + self.type = 'MongoDbV2Collection' # type: str self.collection = kwargs['collection'] @@ -11662,7 +11642,7 @@ def __init__( **kwargs ): super(MongoDbV2LinkedService, self).__init__(**kwargs) - self.type = 'MongoDbV2' + self.type = 'MongoDbV2' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] @@ -11714,7 +11694,7 @@ def __init__( ): super(Trigger, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Trigger' + self.type = 'Trigger' # type: str self.description = kwargs.get('description', None) self.runtime_state = None self.annotations = kwargs.get('annotations', None) @@ -11762,7 +11742,7 @@ def __init__( **kwargs ): super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.type = 'MultiplePipelineTrigger' + self.type = 'MultiplePipelineTrigger' # type: str self.pipelines = kwargs.get('pipelines', None) @@ -11816,7 +11796,7 @@ def __init__( **kwargs ): super(MySqlLinkedService, self).__init__(**kwargs) - self.type = 'MySql' + self.type = 'MySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -11876,7 +11856,7 @@ def __init__( **kwargs ): super(MySqlTableDataset, self).__init__(**kwargs) - self.type = 'MySqlTable' + self.type = 'MySqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -11930,7 +11910,7 @@ def __init__( **kwargs ): super(NetezzaLinkedService, self).__init__(**kwargs) - self.type = 'Netezza' + self.type = 'Netezza' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -11999,7 +11979,7 @@ def __init__( **kwargs ): super(NetezzaTableDataset, self).__init__(**kwargs) - self.type = 'NetezzaTable' + self.type = 'NetezzaTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -12464,7 +12444,7 @@ def __init__( **kwargs ): super(ODataLinkedService, self).__init__(**kwargs) - self.type = 'OData' + self.type = 'OData' # type: str self.url = kwargs['url'] self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) @@ -12533,7 +12513,7 @@ def __init__( **kwargs ): super(ODataResourceDataset, self).__init__(**kwargs) - self.type = 'ODataResource' + self.type = 'ODataResource' # type: str self.path = kwargs.get('path', None) @@ -12601,7 +12581,7 @@ def __init__( **kwargs ): super(OdbcLinkedService, self).__init__(**kwargs) - self.type = 'Odbc' + self.type = 'Odbc' # type: str self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) self.credential = kwargs.get('credential', None) @@ -12664,7 +12644,7 @@ def 
__init__( **kwargs ): super(OdbcTableDataset, self).__init__(**kwargs) - self.type = 'OdbcTable' + self.type = 'OdbcTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -12728,7 +12708,7 @@ def __init__( **kwargs ): super(Office365Dataset, self).__init__(**kwargs) - self.type = 'Office365Table' + self.type = 'Office365Table' # type: str self.table_name = kwargs['table_name'] self.predicate = kwargs.get('predicate', None) @@ -12795,7 +12775,7 @@ def __init__( **kwargs ): super(Office365LinkedService, self).__init__(**kwargs) - self.type = 'Office365' + self.type = 'Office365' # type: str self.office365_tenant_id = kwargs['office365_tenant_id'] self.service_principal_tenant_id = kwargs['service_principal_tenant_id'] self.service_principal_id = kwargs['service_principal_id'] @@ -12854,7 +12834,7 @@ def __init__( **kwargs ): super(OracleLinkedService, self).__init__(**kwargs) - self.type = 'Oracle' + self.type = 'Oracle' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -12930,7 +12910,7 @@ def __init__( **kwargs ): super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'OracleServiceCloud' + self.type = 'OracleServiceCloud' # type: str self.host = kwargs['host'] self.username = kwargs['username'] self.password = kwargs['password'] @@ -12994,7 +12974,7 @@ def __init__( **kwargs ): super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'OracleServiceCloudObject' + self.type = 'OracleServiceCloudObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -13061,7 +13041,7 @@ def __init__( **kwargs ): super(OracleTableDataset, self).__init__(**kwargs) - self.type = 'OracleTable' + self.type = 'OracleTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -13124,7 +13104,7 @@ def __init__( **kwargs ): super(OrcDataset, self).__init__(**kwargs) - self.type = 'Orc' + self.type = 'Orc' # type: str self.location = kwargs.get('location', None) self.orc_compression_codec = kwargs.get('orc_compression_codec', None) @@ -13216,7 +13196,7 @@ def __init__( **kwargs ): super(ParquetDataset, self).__init__(**kwargs) - self.type = 'Parquet' + self.type = 'Parquet' # type: str self.location = kwargs.get('location', None) self.compression_codec = kwargs.get('compression_codec', None) @@ -13288,7 +13268,7 @@ def __init__( **kwargs ): super(PaypalLinkedService, self).__init__(**kwargs) - self.type = 'Paypal' + self.type = 'Paypal' # type: str self.host = kwargs['host'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -13352,7 +13332,7 @@ def __init__( **kwargs ): super(PaypalObjectDataset, self).__init__(**kwargs) - self.type = 'PaypalObject' + self.type = 'PaypalObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -13446,7 +13426,7 @@ def __init__( **kwargs ): super(PhoenixLinkedService, self).__init__(**kwargs) - self.type = 'Phoenix' + self.type = 'Phoenix' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.http_path = kwargs.get('http_path', None) @@ -13524,7 +13504,7 @@ def __init__( **kwargs ): super(PhoenixObjectDataset, self).__init__(**kwargs) - self.type = 'PhoenixObject' + self.type = 'PhoenixObject' # type: str self.table_name = kwargs.get('table_name', None) 
self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -13581,12 +13561,10 @@ def __init__( class PipelineReference(msrest.serialization.Model): """Pipeline reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". - :vartype type: str + :param type: Required. Pipeline reference type. Possible values include: "PipelineReference". + :type type: str or ~azure.synapse.artifacts.models.PipelineReferenceType :param reference_name: Required. Reference pipeline name. :type reference_name: str :param name: Reference name. @@ -13594,7 +13572,7 @@ class PipelineReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -13604,13 +13582,12 @@ class PipelineReference(msrest.serialization.Model): 'name': {'key': 'name', 'type': 'str'}, } - type = "PipelineReference" - def __init__( self, **kwargs ): super(PipelineReference, self).__init__(**kwargs) + self.type = kwargs['type'] self.reference_name = kwargs['reference_name'] self.name = kwargs.get('name', None) @@ -13891,7 +13868,7 @@ def __init__( **kwargs ): super(PostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'PostgreSql' + self.type = 'PostgreSql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -13959,7 +13936,7 @@ def __init__( **kwargs ): super(PostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'PostgreSqlTable' + self.type = 'PostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -14061,7 +14038,7 @@ def __init__( **kwargs ): super(PrestoLinkedService, self).__init__(**kwargs) - self.type = 'Presto' + self.type = 'Presto' # type: str self.host = kwargs['host'] self.server_version = kwargs['server_version'] self.catalog = kwargs['catalog'] @@ -14141,7 +14118,7 @@ def __init__( **kwargs ): super(PrestoObjectDataset, self).__init__(**kwargs) - self.type = 'PrestoObject' + self.type = 'PrestoObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -14242,7 +14219,7 @@ def __init__( **kwargs ): super(QuickBooksLinkedService, self).__init__(**kwargs) - self.type = 'QuickBooks' + self.type = 'QuickBooks' # type: str self.endpoint = kwargs['endpoint'] self.company_id = kwargs['company_id'] self.consumer_key = kwargs['consumer_key'] @@ -14307,7 +14284,7 @@ def __init__( **kwargs ): super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.type = 'QuickBooksObject' + self.type = 'QuickBooksObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -14404,7 +14381,7 @@ def __init__( **kwargs ): super(RelationalTableDataset, self).__init__(**kwargs) - self.type = 'RelationalTable' + self.type = 'RelationalTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -14541,7 +14518,7 @@ def __init__( **kwargs ): super(RerunTumblingWindowTrigger, self).__init__(**kwargs) - self.type = 
'RerunTumblingWindowTrigger' + self.type = 'RerunTumblingWindowTrigger' # type: str self.parent_trigger = kwargs.get('parent_trigger', None) self.requested_start_time = kwargs['requested_start_time'] self.requested_end_time = kwargs['requested_end_time'] @@ -14704,7 +14681,7 @@ def __init__( **kwargs ): super(ResponsysLinkedService, self).__init__(**kwargs) - self.type = 'Responsys' + self.type = 'Responsys' # type: str self.endpoint = kwargs['endpoint'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -14768,7 +14745,7 @@ def __init__( **kwargs ): super(ResponsysObjectDataset, self).__init__(**kwargs) - self.type = 'ResponsysObject' + self.type = 'ResponsysObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -14843,7 +14820,7 @@ def __init__( **kwargs ): super(RestResourceDataset, self).__init__(**kwargs) - self.type = 'RestResource' + self.type = 'RestResource' # type: str self.relative_url = kwargs.get('relative_url', None) self.request_method = kwargs.get('request_method', None) self.request_body = kwargs.get('request_body', None) @@ -14930,7 +14907,7 @@ def __init__( **kwargs ): super(RestServiceLinkedService, self).__init__(**kwargs) - self.type = 'RestService' + self.type = 'RestService' # type: str self.url = kwargs['url'] self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) self.authentication_type = kwargs['authentication_type'] @@ -15123,7 +15100,7 @@ def __init__( **kwargs ): super(SalesforceLinkedService, self).__init__(**kwargs) - self.type = 'Salesforce' + self.type = 'Salesforce' # type: str self.environment_url = kwargs.get('environment_url', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -15197,7 +15174,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloud' + self.type = 'SalesforceMarketingCloud' # type: str self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -15260,7 +15237,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloudObject' + self.type = 'SalesforceMarketingCloudObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -15319,7 +15296,7 @@ def __init__( **kwargs ): super(SalesforceObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceObject' + self.type = 'SalesforceObject' # type: str self.object_api_name = kwargs.get('object_api_name', None) @@ -15386,7 +15363,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloud' + self.type = 'SalesforceServiceCloud' # type: str self.environment_url = kwargs.get('environment_url', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -15450,7 +15427,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudObject' + self.type = 'SalesforceServiceCloudObject' # type: str self.object_api_name = kwargs.get('object_api_name', None) @@ -15505,7 +15482,7 @@ def __init__( **kwargs ): super(SapBwCubeDataset, self).__init__(**kwargs) - self.type = 'SapBwCube' + self.type = 'SapBwCube' # type: str class SapBWLinkedService(LinkedService): @@ -15573,7 +15550,7 
@@ def __init__( **kwargs ): super(SapBWLinkedService, self).__init__(**kwargs) - self.type = 'SapBW' + self.type = 'SapBW' # type: str self.server = kwargs['server'] self.system_number = kwargs['system_number'] self.client_id = kwargs['client_id'] @@ -15638,7 +15615,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) - self.type = 'SapCloudForCustomer' + self.type = 'SapCloudForCustomer' # type: str self.url = kwargs['url'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -15701,7 +15678,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerResource' + self.type = 'SapCloudForCustomerResource' # type: str self.path = kwargs['path'] @@ -15761,7 +15738,7 @@ def __init__( **kwargs ): super(SapEccLinkedService, self).__init__(**kwargs) - self.type = 'SapEcc' + self.type = 'SapEcc' # type: str self.url = kwargs['url'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -15824,7 +15801,7 @@ def __init__( **kwargs ): super(SapEccResourceDataset, self).__init__(**kwargs) - self.type = 'SapEccResource' + self.type = 'SapEccResource' # type: str self.path = kwargs['path'] @@ -15891,7 +15868,7 @@ def __init__( **kwargs ): super(SapHanaLinkedService, self).__init__(**kwargs) - self.type = 'SapHana' + self.type = 'SapHana' # type: str self.connection_string = kwargs.get('connection_string', None) self.server = kwargs['server'] self.authentication_type = kwargs.get('authentication_type', None) @@ -15958,7 +15935,7 @@ def __init__( **kwargs ): super(SapHanaTableDataset, self).__init__(**kwargs) - self.type = 'SapHanaTable' + self.type = 'SapHanaTable' # type: str self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -16035,7 +16012,7 @@ def __init__( **kwargs ): super(SapOpenHubLinkedService, self).__init__(**kwargs) - self.type = 'SapOpenHub' + self.type = 'SapOpenHub' # type: str self.server = kwargs['server'] self.system_number = kwargs['system_number'] self.client_id = kwargs['client_id'] @@ -16110,7 +16087,7 @@ def __init__( **kwargs ): super(SapOpenHubTableDataset, self).__init__(**kwargs) - self.type = 'SapOpenHubTable' + self.type = 'SapOpenHubTable' # type: str self.open_hub_destination_name = kwargs['open_hub_destination_name'] self.exclude_last_request = kwargs.get('exclude_last_request', None) self.base_request_id = kwargs.get('base_request_id', None) @@ -16220,7 +16197,7 @@ def __init__( **kwargs ): super(SapTableLinkedService, self).__init__(**kwargs) - self.type = 'SapTable' + self.type = 'SapTable' # type: str self.server = kwargs.get('server', None) self.system_number = kwargs.get('system_number', None) self.client_id = kwargs.get('client_id', None) @@ -16295,7 +16272,7 @@ def __init__( **kwargs ): super(SapTableResourceDataset, self).__init__(**kwargs) - self.type = 'SapTableResource' + self.type = 'SapTableResource' # type: str self.table_name = kwargs['table_name'] @@ -16365,7 +16342,7 @@ def __init__( **kwargs ): super(SecureString, self).__init__(**kwargs) - self.type = 'SecureString' + self.type = 'SecureString' # type: str self.value = kwargs['value'] @@ -16449,7 +16426,7 @@ def __init__( **kwargs ): super(ServiceNowLinkedService, self).__init__(**kwargs) - self.type = 'ServiceNow' + self.type = 'ServiceNow' # type: str self.endpoint = kwargs['endpoint'] self.authentication_type = 
kwargs['authentication_type'] self.username = kwargs.get('username', None) @@ -16516,7 +16493,7 @@ def __init__( **kwargs ): super(ServiceNowObjectDataset, self).__init__(**kwargs) - self.type = 'ServiceNowObject' + self.type = 'ServiceNowObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -16565,7 +16542,7 @@ def __init__( **kwargs ): super(SetVariableActivity, self).__init__(**kwargs) - self.type = 'SetVariable' + self.type = 'SetVariable' # type: str self.variable_name = kwargs.get('variable_name', None) self.value = kwargs.get('value', None) @@ -16657,7 +16634,7 @@ def __init__( **kwargs ): super(SftpServerLinkedService, self).__init__(**kwargs) - self.type = 'Sftp' + self.type = 'Sftp' # type: str self.host = kwargs['host'] self.port = kwargs.get('port', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -16735,7 +16712,7 @@ def __init__( **kwargs ): super(ShopifyLinkedService, self).__init__(**kwargs) - self.type = 'Shopify' + self.type = 'Shopify' # type: str self.host = kwargs['host'] self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -16798,7 +16775,7 @@ def __init__( **kwargs ): super(ShopifyObjectDataset, self).__init__(**kwargs) - self.type = 'ShopifyObject' + self.type = 'ShopifyObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -17247,7 +17224,7 @@ def __init__( **kwargs ): super(SparkLinkedService, self).__init__(**kwargs) - self.type = 'Spark' + self.type = 'Spark' # type: str self.host = kwargs['host'] self.port = kwargs['port'] self.server_type = kwargs.get('server_type', None) @@ -17326,7 +17303,7 @@ def __init__( **kwargs ): super(SparkObjectDataset, self).__init__(**kwargs) - self.type = 'SparkObject' + self.type = 'SparkObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -17541,11 +17518,98 @@ def __init__( self.name = kwargs['name'] +class SqlPoolReference(msrest.serialization.Model): + """SQL pool reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. SQL pool reference type. Possible values include: "SqlPoolReference". + :type type: str or ~azure.synapse.artifacts.models.SqlPoolReferenceType + :param reference_name: Required. Reference SQL pool name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlPoolReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + + +class SqlPoolStoredProcedureActivity(Activity): + """Execute SQL pool stored procedure activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param sql_pool: Required. SQL pool stored procedure reference. + :type sql_pool: ~azure.synapse.artifacts.models.SqlPoolReference + :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with + resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'sql_pool': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'sql_pool': {'key': 'sqlPool', 'type': 'SqlPoolReference'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlPoolStoredProcedureActivity, self).__init__(**kwargs) + self.type = 'SqlPoolStoredProcedure' # type: str + self.sql_pool = kwargs['sql_pool'] + self.stored_procedure_name = kwargs['stored_procedure_name'] + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + + class SqlScript(msrest.serialization.Model): """SQL script. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -17553,14 +17617,13 @@ class SqlScript(msrest.serialization.Model): :type additional_properties: dict[str, object] :param description: The description of the SQL script. :type description: str - :ivar type: The type of the SQL script. Default value: "SqlQuery". - :vartype type: str + :param type: The type of the SQL script. Possible values include: "SqlQuery". + :type type: str or ~azure.synapse.artifacts.models.SqlScriptType :param content: Required. The content of the SQL script. 
:type content: ~azure.synapse.artifacts.models.SqlScriptContent """ _validation = { - 'type': {'constant': True}, 'content': {'required': True}, } @@ -17571,8 +17634,6 @@ class SqlScript(msrest.serialization.Model): 'content': {'key': 'content', 'type': 'SqlScriptContent'}, } - type = "SqlQuery" - def __init__( self, **kwargs @@ -17580,6 +17641,7 @@ def __init__( super(SqlScript, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.description = kwargs.get('description', None) + self.type = kwargs.get('type', None) self.content = kwargs['content'] @@ -17773,7 +17835,7 @@ def __init__( **kwargs ): super(SqlServerLinkedService, self).__init__(**kwargs) - self.type = 'SqlServer' + self.type = 'SqlServer' # type: str self.connection_string = kwargs['connection_string'] self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) @@ -17835,7 +17897,7 @@ def __init__( **kwargs ): super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) - self.type = 'SqlServerStoredProcedure' + self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = kwargs['stored_procedure_name'] self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) @@ -17903,7 +17965,7 @@ def __init__( **kwargs ): super(SqlServerTableDataset, self).__init__(**kwargs) - self.type = 'SqlServerTable' + self.type = 'SqlServerTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) @@ -17981,7 +18043,7 @@ def __init__( **kwargs ): super(SquareLinkedService, self).__init__(**kwargs) - self.type = 'Square' + self.type = 'Square' # type: str self.host = kwargs['host'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -18046,7 +18108,7 @@ def __init__( **kwargs ): super(SquareObjectDataset, self).__init__(**kwargs) - self.type = 'SquareObject' + self.type = 'SquareObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -18189,15 +18251,13 @@ def __init__( class SSISLogLocation(msrest.serialization.Model): """SSIS package execution log location. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File". - :vartype type: str + :param type: Required. The type of SSIS log location. Possible values include: "File". + :type type: str or ~azure.synapse.artifacts.models.SsisLogLocationType :param access_credential: The package execution log access credential. :type access_credential: ~azure.synapse.artifacts.models.SSISAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. 
The default interval is 5 @@ -18208,7 +18268,7 @@ class SSISLogLocation(msrest.serialization.Model): _validation = { 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, } _attribute_map = { @@ -18218,14 +18278,13 @@ class SSISLogLocation(msrest.serialization.Model): 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, } - type = "File" - def __init__( self, **kwargs ): super(SSISLogLocation, self).__init__(**kwargs) self.log_path = kwargs['log_path'] + self.type = kwargs['type'] self.access_credential = kwargs.get('access_credential', None) self.log_refresh_interval = kwargs.get('log_refresh_interval', None) @@ -18527,7 +18586,7 @@ def __init__( **kwargs ): super(SwitchActivity, self).__init__(**kwargs) - self.type = 'Switch' + self.type = 'Switch' # type: str self.on = kwargs['on'] self.cases = kwargs.get('cases', None) self.default_activities = kwargs.get('default_activities', None) @@ -18623,7 +18682,7 @@ def __init__( **kwargs ): super(SybaseLinkedService, self).__init__(**kwargs) - self.type = 'Sybase' + self.type = 'Sybase' # type: str self.server = kwargs['server'] self.database = kwargs['database'] self.schema = kwargs.get('schema', None) @@ -18687,10 +18746,170 @@ def __init__( **kwargs ): super(SybaseTableDataset, self).__init__(**kwargs) - self.type = 'SybaseTable' + self.type = 'SybaseTable' # type: str self.table_name = kwargs.get('table_name', None) +class SynapseNotebookActivity(Activity): + """Execute Synapse notebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param notebook: Required. Synapse notebook reference. + :type notebook: ~azure.synapse.artifacts.models.SynapseNotebookReference + :param parameters: Notebook parameters. + :type parameters: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'notebook': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'notebook': {'key': 'typeProperties.notebook', 'type': 'SynapseNotebookReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseNotebookActivity, self).__init__(**kwargs) + self.type = 'SynapseNotebook' # type: str + self.notebook = kwargs['notebook'] + self.parameters = kwargs.get('parameters', None) + + +class SynapseNotebookReference(msrest.serialization.Model): + """Synapse notebook reference type. + + All required parameters must be populated in order to send to Azure. 
+ + :param type: Required. Synapse notebook reference type. Possible values include: + "NotebookReference". + :type type: str or ~azure.synapse.artifacts.models.NotebookReferenceType + :param reference_name: Required. Reference notebook name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseNotebookReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + + +class SynapseSparkJobDefinitionActivity(Activity): + """Execute spark job activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param spark_job: Required. Synapse spark job reference. + :type spark_job: ~azure.synapse.artifacts.models.SynapseSparkJobReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'spark_job': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'spark_job': {'key': 'typeProperties.sparkJob', 'type': 'SynapseSparkJobReference'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseSparkJobDefinitionActivity, self).__init__(**kwargs) + self.type = 'SparkJob' # type: str + self.spark_job = kwargs['spark_job'] + + +class SynapseSparkJobReference(msrest.serialization.Model): + """Synapse spark job reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Synapse spark job reference type. Possible values include: + "SparkJobDefinitionReference". + :type type: str or ~azure.synapse.artifacts.models.SparkJobReferenceType + :param reference_name: Required. Reference spark job name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseSparkJobReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. 
@@ -18752,7 +18971,7 @@ def __init__( **kwargs ): super(TeradataLinkedService, self).__init__(**kwargs) - self.type = 'Teradata' + self.type = 'Teradata' # type: str self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -18819,7 +19038,7 @@ def __init__( **kwargs ): super(TeradataTableDataset, self).__init__(**kwargs) - self.type = 'TeradataTable' + self.type = 'TeradataTable' # type: str self.database = kwargs.get('database', None) self.table = kwargs.get('table', None) @@ -19131,7 +19350,7 @@ def __init__( **kwargs ): super(UntilActivity, self).__init__(**kwargs) - self.type = 'Until' + self.type = 'Until' # type: str self.expression = kwargs['expression'] self.timeout = kwargs.get('timeout', None) self.activities = kwargs['activities'] @@ -19230,7 +19449,7 @@ def __init__( **kwargs ): super(ValidationActivity, self).__init__(**kwargs) - self.type = 'Validation' + self.type = 'Validation' # type: str self.timeout = kwargs.get('timeout', None) self.sleep = kwargs.get('sleep', None) self.minimum_size = kwargs.get('minimum_size', None) @@ -19318,7 +19537,7 @@ def __init__( **kwargs ): super(VerticaLinkedService, self).__init__(**kwargs) - self.type = 'Vertica' + self.type = 'Vertica' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -19387,7 +19606,7 @@ def __init__( **kwargs ): super(VerticaTableDataset, self).__init__(**kwargs) - self.type = 'VerticaTable' + self.type = 'VerticaTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -19436,7 +19655,7 @@ def __init__( **kwargs ): super(WaitActivity, self).__init__(**kwargs) - self.type = 'Wait' + self.type = 'Wait' # type: str self.wait_time_in_seconds = kwargs['wait_time_in_seconds'] @@ -19516,7 +19735,7 @@ def __init__( **kwargs ): super(WebActivity, self).__init__(**kwargs) - self.type = 'WebActivity' + self.type = 'WebActivity' # type: str self.method = kwargs['method'] self.url = kwargs['url'] self.headers = kwargs.get('headers', None) @@ -19606,7 +19825,7 @@ def __init__( ): super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = kwargs['url'] - self.authentication_type = None + self.authentication_type = None # type: Optional[str] class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): @@ -19638,7 +19857,7 @@ def __init__( **kwargs ): super(WebAnonymousAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Anonymous' + self.authentication_type = 'Anonymous' # type: str class WebBasicAuthentication(WebLinkedServiceTypeProperties): @@ -19679,7 +19898,7 @@ def __init__( **kwargs ): super(WebBasicAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Basic' + self.authentication_type = 'Basic' # type: str self.username = kwargs['username'] self.password = kwargs['password'] @@ -19721,7 +19940,7 @@ def __init__( **kwargs ): super(WebClientCertificateAuthentication, self).__init__(**kwargs) - self.authentication_type = 'ClientCertificate' + self.authentication_type = 'ClientCertificate' # type: str self.pfx = kwargs['pfx'] self.password = kwargs['password'] @@ -19729,8 +19948,6 @@ def __init__( class WebHookActivity(Activity): """WebHook activity. 
- Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -19746,8 +19963,8 @@ class WebHookActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :ivar method: Required. Rest API method for target endpoint. Default value: "POST". - :vartype method: str + :param method: Required. Rest API method for target endpoint. Possible values include: "POST". + :type method: str or ~azure.synapse.artifacts.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object @@ -19774,7 +19991,7 @@ class WebHookActivity(Activity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, + 'method': {'required': True}, 'url': {'required': True}, } @@ -19794,14 +20011,13 @@ class WebHookActivity(Activity): 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, } - method = "POST" - def __init__( self, **kwargs ): super(WebHookActivity, self).__init__(**kwargs) - self.type = 'WebHook' + self.type = 'WebHook' # type: str + self.method = kwargs['method'] self.url = kwargs['url'] self.timeout = kwargs.get('timeout', None) self.headers = kwargs.get('headers', None) @@ -19852,7 +20068,7 @@ def __init__( **kwargs ): super(WebLinkedService, self).__init__(**kwargs) - self.type = 'Web' + self.type = 'Web' # type: str self.type_properties = kwargs['type_properties'] @@ -19916,7 +20132,7 @@ def __init__( **kwargs ): super(WebTableDataset, self).__init__(**kwargs) - self.type = 'WebTable' + self.type = 'WebTable' # type: str self.index = kwargs['index'] self.path = kwargs.get('path', None) @@ -20125,7 +20341,7 @@ def __init__( **kwargs ): super(XeroLinkedService, self).__init__(**kwargs) - self.type = 'Xero' + self.type = 'Xero' # type: str self.host = kwargs['host'] self.consumer_key = kwargs.get('consumer_key', None) self.private_key = kwargs.get('private_key', None) @@ -20189,7 +20405,7 @@ def __init__( **kwargs ): super(XeroObjectDataset, self).__init__(**kwargs) - self.type = 'XeroObject' + self.type = 'XeroObject' # type: str self.table_name = kwargs.get('table_name', None) @@ -20256,7 +20472,7 @@ def __init__( **kwargs ): super(ZohoLinkedService, self).__init__(**kwargs) - self.type = 'Zoho' + self.type = 'Zoho' # type: str self.endpoint = kwargs['endpoint'] self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -20319,5 +20535,5 @@ def __init__( **kwargs ): super(ZohoObjectDataset, self).__init__(**kwargs) - self.type = 'ZohoObject' + self.type = 'ZohoObject' # type: str self.table_name = kwargs.get('table_name', None) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py index 486baee4e7de..97879f461074 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py @@ -19,7 +19,7 @@ class Activity(msrest.serialization.Model): """A 
pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SynapseSparkJobDefinitionActivity, SqlPoolStoredProcedureActivity, SwitchActivity, SynapseNotebookActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. All required parameters must be populated in order to send to Azure. @@ -53,7 +53,7 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'SparkJob': 'SynapseSparkJobDefinitionActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity', 'Switch': 'SwitchActivity', 'SynapseNotebook': 'SynapseNotebookActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} } def __init__( @@ -69,7 +69,7 @@ def __init__( super(Activity, self).__init__(**kwargs) self.additional_properties = additional_properties self.name = name - self.type: str = 'Activity' + self.type = 'Activity' # type: str self.description = description self.depends_on = depends_on self.user_properties = user_properties @@ -369,7 +369,7 @@ def __init__( ): super(LinkedService, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'LinkedService' + self.type = 'LinkedService' # type: str self.connect_via = connect_via self.description = description self.parameters = parameters @@ -473,7 +473,7 @@ def __init__( **kwargs ): super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonMWS' + self.type = 'AmazonMWS' # type: str self.endpoint = endpoint self.marketplace_id = marketplace_id self.seller_id = seller_id @@ -554,7 +554,7 @@ def __init__( ): super(Dataset, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'Dataset' + self.type = 'Dataset' # type: str self.description = description self.structure = structure self.schema = schema @@ -628,7 +628,7 @@ def __init__( **kwargs ): super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AmazonMWSObject' + 
self.type = 'AmazonMWSObject' # type: str self.table_name = table_name @@ -708,7 +708,7 @@ def __init__( **kwargs ): super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonRedshift' + self.type = 'AmazonRedshift' # type: str self.server = server self.username = username self.password = password @@ -792,7 +792,7 @@ def __init__( **kwargs ): super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AmazonRedshiftTable' + self.type = 'AmazonRedshiftTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -864,7 +864,7 @@ def __init__( **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonS3' + self.type = 'AmazonS3' # type: str self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url @@ -924,7 +924,7 @@ def __init__( **kwargs ): super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'AppendVariable' + self.type = 'AppendVariable' # type: str self.variable_name = variable_name self.value = value @@ -1003,7 +1003,7 @@ def __init__( **kwargs ): super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Avro' + self.type = 'Avro' # type: str self.location = location self.avro_compression_codec = avro_compression_codec self.avro_compression_level = avro_compression_level @@ -1086,7 +1086,7 @@ def __init__( **kwargs ): super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBatch' + self.type = 'AzureBatch' # type: str self.account_name = account_name self.access_key = access_key self.batch_uri = batch_uri @@ -1171,7 +1171,7 @@ def __init__( **kwargs ): super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBlobFS' + self.type = 'AzureBlobFS' # type: str self.url = url self.account_key = account_key self.service_principal_id = service_principal_id @@ -1269,7 +1269,7 @@ def __init__( **kwargs ): super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBlobStorage' + self.type = 'AzureBlobStorage' # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -1408,7 +1408,7 @@ def 
__init__( **kwargs ): super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDatabricks' + self.type = 'AzureDatabricks' # type: str self.domain = domain self.access_token = access_token self.existing_cluster_id = existing_cluster_id @@ -1485,7 +1485,7 @@ def __init__( **kwargs ): super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Execution' + self.type = 'Execution' # type: str self.linked_service_name = linked_service_name self.policy = policy @@ -1554,7 +1554,7 @@ def __init__( **kwargs ): super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureDataExplorerCommand' + self.type = 'AzureDataExplorerCommand' # type: str self.command = command self.command_timeout = command_timeout @@ -1634,7 +1634,7 @@ def __init__( **kwargs ): super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataExplorer' + self.type = 'AzureDataExplorer' # type: str self.endpoint = endpoint self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -1707,7 +1707,7 @@ def __init__( **kwargs ): super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureDataExplorerTable' + self.type = 'AzureDataExplorerTable' # type: str self.table = table @@ -1798,7 +1798,7 @@ def __init__( **kwargs ): super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataLakeAnalytics' + self.type = 'AzureDataLakeAnalytics' # type: str self.account_name = account_name self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -1895,7 +1895,7 @@ def __init__( **kwargs ): super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataLakeStore' + self.type = 'AzureDataLakeStore' # type: str self.data_lake_store_uri = data_lake_store_uri self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -1971,7 +1971,7 @@ def __init__( **kwargs ): super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureFileStorage' + self.type = 'AzureFileStorage' # type: str self.host = host self.user_id = user_id self.password = password @@ -2054,7 +2054,7 @@ def 
__init__( **kwargs ): super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureFunctionActivity' + self.type = 'AzureFunctionActivity' # type: str self.method = method self.function_name = function_name self.headers = headers @@ -2121,7 +2121,7 @@ def __init__( **kwargs ): super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureFunction' + self.type = 'AzureFunction' # type: str self.function_app_url = function_app_url self.function_key = function_key self.encrypted_credential = encrypted_credential @@ -2177,7 +2177,7 @@ def __init__( **kwargs ): super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureKeyVault' + self.type = 'AzureKeyVault' # type: str self.base_url = base_url @@ -2210,7 +2210,7 @@ def __init__( **kwargs ): super(SecretBase, self).__init__(**kwargs) - self.type: Optional[str] = None + self.type = None # type: Optional[str] class AzureKeyVaultSecretReference(SecretBase): @@ -2252,7 +2252,7 @@ def __init__( **kwargs ): super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type: str = 'AzureKeyVaultSecret' + self.type = 'AzureKeyVaultSecret' # type: str self.store = store self.secret_name = secret_name self.secret_version = secret_version @@ -2317,7 +2317,7 @@ def __init__( **kwargs ): super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMariaDB' + self.type = 'AzureMariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -2387,7 +2387,7 @@ def __init__( **kwargs ): super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureMariaDBTable' + self.type = 'AzureMariaDBTable' # type: str self.table_name = table_name @@ -2465,7 +2465,7 @@ def __init__( **kwargs ): super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLBatchExecution' + self.type = 'AzureMLBatchExecution' # type: str self.global_parameters = global_parameters self.web_service_outputs = web_service_outputs self.web_service_inputs = web_service_inputs @@ -2556,7 +2556,7 @@ def __init__( **kwargs ): super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLExecutePipeline' + self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = 
ml_pipeline_id self.experiment_name = experiment_name self.ml_pipeline_parameters = ml_pipeline_parameters @@ -2646,7 +2646,7 @@ def __init__( **kwargs ): super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureML' + self.type = 'AzureML' # type: str self.ml_endpoint = ml_endpoint self.api_key = api_key self.update_resource_endpoint = update_resource_endpoint @@ -2740,7 +2740,7 @@ def __init__( **kwargs ): super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMLService' + self.type = 'AzureMLService' # type: str self.subscription_id = subscription_id self.resource_group_name = resource_group_name self.ml_workspace_name = ml_workspace_name @@ -2822,7 +2822,7 @@ def __init__( **kwargs ): super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLUpdateResource' + self.type = 'AzureMLUpdateResource' # type: str self.trained_model_name = trained_model_name self.trained_model_linked_service_name = trained_model_linked_service_name self.trained_model_file_path = trained_model_file_path @@ -2923,7 +2923,7 @@ def __init__( **kwargs ): super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMySql' + self.type = 'AzureMySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -2999,7 +2999,7 @@ def __init__( **kwargs ): super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureMySqlTable' + self.type = 'AzureMySqlTable' # type: str self.table_name = table_name self.table = table @@ -3063,7 +3063,7 @@ def __init__( **kwargs ): super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzurePostgreSql' + self.type = 'AzurePostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -3144,7 +3144,7 @@ def __init__( **kwargs ): super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzurePostgreSqlTable' + self.type = 'AzurePostgreSqlTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -3216,7 +3216,7 @@ def __init__( **kwargs ): super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, 
description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSearchIndex' + self.type = 'AzureSearchIndex' # type: str self.index_name = index_name @@ -3280,7 +3280,7 @@ def __init__( **kwargs ): super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSearch' + self.type = 'AzureSearch' # type: str self.url = url self.key = key self.encrypted_credential = encrypted_credential @@ -3361,7 +3361,7 @@ def __init__( **kwargs ): super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlDatabase' + self.type = 'AzureSqlDatabase' # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id @@ -3445,7 +3445,7 @@ def __init__( **kwargs ): super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlDW' + self.type = 'AzureSqlDW' # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id @@ -3529,7 +3529,7 @@ def __init__( **kwargs ): super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlDWTable' + self.type = 'AzureSqlDWTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -3610,7 +3610,7 @@ def __init__( **kwargs ): super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlMI' + self.type = 'AzureSqlMI' # type: str self.connection_string = connection_string self.password = password self.service_principal_id = service_principal_id @@ -3694,7 +3694,7 @@ def __init__( **kwargs ): super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlMITable' + self.type = 'AzureSqlMITable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -3775,7 +3775,7 @@ def __init__( **kwargs ): super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlTable' + self.type = 'AzureSqlTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -3849,7 +3849,7 @@ 
def __init__( **kwargs ): super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureStorage' + self.type = 'AzureStorage' # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -3923,7 +3923,7 @@ def __init__( **kwargs ): super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureTable' + self.type = 'AzureTable' # type: str self.table_name = table_name @@ -3995,7 +3995,7 @@ def __init__( **kwargs ): super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureTableStorage' + self.type = 'AzureTableStorage' # type: str self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri @@ -4006,18 +4006,17 @@ def __init__( class BigDataPoolReference(msrest.serialization.Model): """Big data pool reference. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Big data pool reference type. Default value: "BigDataPoolReference". - :vartype type: str + :param type: Required. Big data pool reference type. Possible values include: + "BigDataPoolReference". + :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType :param reference_name: Required. Reference big data pool name. 
:type reference_name: str """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -4026,15 +4025,15 @@ class BigDataPoolReference(msrest.serialization.Model): 'reference_name': {'key': 'referenceName', 'type': 'str'}, } - type = "BigDataPoolReference" - def __init__( self, *, + type: Union[str, "BigDataPoolReferenceType"], reference_name: str, **kwargs ): super(BigDataPoolReference, self).__init__(**kwargs) + self.type = type self.reference_name = reference_name @@ -4106,7 +4105,7 @@ def __init__( **kwargs ): super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Binary' + self.type = 'Binary' # type: str self.location = location self.compression = compression @@ -4186,7 +4185,7 @@ def __init__( **kwargs ): super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Cassandra' + self.type = 'Cassandra' # type: str self.host = host self.authentication_type = authentication_type self.port = port @@ -4265,7 +4264,7 @@ def __init__( **kwargs ): super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CassandraTable' + self.type = 'CassandraTable' # type: str self.table_name = table_name self.keyspace = keyspace @@ -4378,7 +4377,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CommonDataServiceForAppsEntity' + self.type = 'CommonDataServiceForAppsEntity' # type: str self.entity_name = entity_name @@ -4504,7 +4503,7 @@ def __init__( **kwargs ): super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CommonDataServiceForApps' + self.type = 'CommonDataServiceForApps' # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -4600,7 +4599,7 @@ def __init__( **kwargs ): super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Concur' + self.type = 'Concur' # type: str self.client_id = client_id self.username = username self.password = password @@ -4674,7 +4673,7 @@ def __init__( **kwargs ): super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ConcurObject' + self.type = 'ConcurObject' # type: str self.table_name = table_name @@ -4723,7 +4722,7 @@ def 
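A minimal usage sketch of the new BigDataPoolReference shape (illustrative only, not part of the generated patch; the pool name is a placeholder). With this regeneration `type` is no longer a class-level constant and must be supplied to the constructor, either as a plain string or as a BigDataPoolReferenceType value:

    from azure.synapse.artifacts.models import BigDataPoolReference

    # 'type' used to default to the constant "BigDataPoolReference"; it is now an
    # explicit keyword-only argument alongside the required reference_name.
    pool_ref = BigDataPoolReference(
        type="BigDataPoolReference",   # or the corresponding BigDataPoolReferenceType member
        reference_name="mysparkpool",  # placeholder pool name
    )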
__init__( **kwargs ): super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Container' + self.type = 'Container' # type: str class CopyActivity(ExecutionActivity): @@ -4840,7 +4839,7 @@ def __init__( **kwargs ): super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Copy' + self.type = 'Copy' # type: str self.inputs = inputs self.outputs = outputs self.source = source @@ -4917,7 +4916,7 @@ def __init__( ): super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'CopySink' + self.type = 'CopySink' # type: str self.write_batch_size = write_batch_size self.write_batch_timeout = write_batch_timeout self.sink_retry_count = sink_retry_count @@ -4976,7 +4975,7 @@ def __init__( ): super(CopySource, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'CopySource' + self.type = 'CopySource' # type: str self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait self.max_concurrent_connections = max_concurrent_connections @@ -5051,7 +5050,7 @@ def __init__( **kwargs ): super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CosmosDb' + self.type = 'CosmosDb' # type: str self.connection_string = connection_string self.account_endpoint = account_endpoint self.database = database @@ -5125,7 +5124,7 @@ def __init__( **kwargs ): super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CosmosDbMongoDbApiCollection' + self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = collection @@ -5186,7 +5185,7 @@ def __init__( **kwargs ): super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CosmosDbMongoDbApi' + self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = connection_string self.database = database @@ -5257,7 +5256,7 @@ def __init__( **kwargs ): super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CosmosDbSqlApiCollection' + self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = collection_name @@ -5320,7 +5319,7 @@ def __init__( **kwargs ): super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Couchbase' + self.type = 'Couchbase' # type: str self.connection_string = connection_string self.cred_string = cred_string 
self.encrypted_credential = encrypted_credential @@ -5390,7 +5389,7 @@ def __init__( **kwargs ): super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CouchbaseTable' + self.type = 'CouchbaseTable' # type: str self.table_name = table_name @@ -5571,7 +5570,7 @@ def __init__( **kwargs ): super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Custom' + self.type = 'Custom' # type: str self.command = command self.resource_linked_service = resource_linked_service self.folder_path = folder_path @@ -5655,7 +5654,7 @@ def __init__( **kwargs ): super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CustomDataSource' + self.type = 'CustomDataSource' # type: str self.type_properties = type_properties @@ -5728,7 +5727,7 @@ def __init__( **kwargs ): super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksNotebook' + self.type = 'DatabricksNotebook' # type: str self.notebook_path = notebook_path self.base_parameters = base_parameters self.libraries = libraries @@ -5802,7 +5801,7 @@ def __init__( **kwargs ): super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksSparkJar' + self.type = 'DatabricksSparkJar' # type: str self.main_class_name = main_class_name self.parameters = parameters self.libraries = libraries @@ -5875,7 +5874,7 @@ def __init__( **kwargs ): super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksSparkPython' + self.type = 'DatabricksSparkPython' # type: str self.python_file = python_file self.parameters = parameters self.libraries = libraries @@ -5924,7 +5923,7 @@ def __init__( **kwargs ): super(DataFlow, self).__init__(**kwargs) - self.type: Optional[str] = None + self.type = None # type: Optional[str] self.description = description self.annotations = annotations self.folder = folder @@ -6376,15 +6375,13 @@ def __init__( class DataFlowReference(msrest.serialization.Model): """Data flow reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :ivar type: Required. Data flow reference type. Default value: "DataFlowReference". 
- :vartype type: str + :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". + :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType :param reference_name: Required. Reference data flow name. :type reference_name: str :param dataset_parameters: Reference data flow parameters from dataset. @@ -6392,7 +6389,7 @@ class DataFlowReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -6403,11 +6400,10 @@ class DataFlowReference(msrest.serialization.Model): 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } - type = "DataFlowReference" - def __init__( self, *, + type: Union[str, "DataFlowReferenceType"], reference_name: str, additional_properties: Optional[Dict[str, object]] = None, dataset_parameters: Optional[object] = None, @@ -6415,6 +6411,7 @@ def __init__( ): super(DataFlowReference, self).__init__(**kwargs) self.additional_properties = additional_properties + self.type = type self.reference_name = reference_name self.dataset_parameters = dataset_parameters @@ -6753,7 +6750,7 @@ def __init__( **kwargs ): super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DataLakeAnalyticsU-SQL' + self.type = 'DataLakeAnalyticsU-SQL' # type: str self.script_path = script_path self.script_linked_service = script_linked_service self.degree_of_parallelism = degree_of_parallelism @@ -6799,7 +6796,7 @@ def __init__( ): super(DatasetCompression, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'DatasetCompression' + self.type = 'DatasetCompression' # type: str class DatasetBZip2Compression(DatasetCompression): @@ -6830,7 +6827,7 @@ def __init__( **kwargs ): super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'BZip2' + self.type = 'BZip2' # type: str class DatasetDebugResource(SubResourceDebugResource): @@ -6896,7 +6893,7 @@ def __init__( **kwargs ): super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'Deflate' + self.type = 'Deflate' # type: str self.level = level @@ -6953,7 +6950,7 @@ def __init__( **kwargs ): super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'GZip' + self.type = 'GZip' # type: str self.level = level @@ -7035,7 +7032,7 @@ def __init__( ): super(DatasetLocation, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'DatasetLocation' + self.type = 'DatasetLocation' # type: str self.folder_path = folder_path self.file_name = file_name @@ -7043,12 +7040,10 @@ def __init__( class DatasetReference(msrest.serialization.Model): """Dataset reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Dataset reference type. Default value: "DatasetReference". - :vartype type: str + :param type: Required. Dataset reference type. Possible values include: "DatasetReference". 
+ :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType :param reference_name: Required. Reference dataset name. :type reference_name: str :param parameters: Arguments for dataset. @@ -7056,7 +7051,7 @@ class DatasetReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -7066,16 +7061,16 @@ class DatasetReference(msrest.serialization.Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "DatasetReference" - def __init__( self, *, + type: Union[str, "DatasetReferenceType"], reference_name: str, parameters: Optional[Dict[str, object]] = None, **kwargs ): super(DatasetReference, self).__init__(**kwargs) + self.type = type self.reference_name = reference_name self.parameters = parameters @@ -7157,15 +7152,13 @@ def __init__( **kwargs ): super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'ZipDeflate' + self.type = 'ZipDeflate' # type: str self.level = level class Db2LinkedService(LinkedService): """Linked service for DB2 data source. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -7187,9 +7180,9 @@ class Db2LinkedService(LinkedService): :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object - :ivar authentication_type: AuthenticationType to be used for connection. Default value: - "Basic". - :vartype authentication_type: str + :param authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic". + :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). 
:type username: object @@ -7211,7 +7204,6 @@ class Db2LinkedService(LinkedService): 'type': {'required': True}, 'server': {'required': True}, 'database': {'required': True}, - 'authentication_type': {'constant': True}, } _attribute_map = { @@ -7231,8 +7223,6 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - authentication_type = "Basic" - def __init__( self, *, @@ -7243,6 +7233,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + authentication_type: Optional[Union[str, "Db2AuthenticationType"]] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, package_collection: Optional[object] = None, @@ -7251,9 +7242,10 @@ def __init__( **kwargs ): super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Db2' + self.type = 'Db2' # type: str self.server = server self.database = database + self.authentication_type = authentication_type self.username = username self.password = password self.package_collection = package_collection @@ -7335,7 +7327,7 @@ def __init__( **kwargs ): super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Db2Table' + self.type = 'Db2Table' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -7420,7 +7412,7 @@ def __init__( **kwargs ): super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Delete' + self.type = 'Delete' # type: str self.recursive = recursive self.max_concurrent_connections = max_concurrent_connections self.enable_logging = enable_logging @@ -7563,7 +7555,7 @@ def __init__( **kwargs ): super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DelimitedText' + self.type = 'DelimitedText' # type: str self.location = location self.column_delimiter = column_delimiter self.row_delimiter = row_delimiter @@ -7642,7 +7634,7 @@ def __init__( **kwargs ): super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DocumentDbCollection' + self.type = 'DocumentDbCollection' # type: str self.collection_name = collection_name @@ -7705,7 +7697,7 @@ def __init__( **kwargs ): super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Drill' + self.type = 'Drill' # type: str self.connection_string = 
connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -7785,7 +7777,7 @@ def __init__( **kwargs ): super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DrillTable' + self.type = 'DrillTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -7873,7 +7865,7 @@ def __init__( **kwargs ): super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'DynamicsAX' + self.type = 'DynamicsAX' # type: str self.url = url self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key @@ -7948,7 +7940,7 @@ def __init__( **kwargs ): super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsAXResource' + self.type = 'DynamicsAXResource' # type: str self.path = path @@ -8017,7 +8009,7 @@ def __init__( **kwargs ): super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsCrmEntity' + self.type = 'DynamicsCrmEntity' # type: str self.entity_name = entity_name @@ -8141,7 +8133,7 @@ def __init__( **kwargs ): super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'DynamicsCrm' + self.type = 'DynamicsCrm' # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -8221,7 +8213,7 @@ def __init__( **kwargs ): super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsEntity' + self.type = 'DynamicsEntity' # type: str self.entity_name = entity_name @@ -8342,7 +8334,7 @@ def __init__( **kwargs ): super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Dynamics' + self.type = 'Dynamics' # type: str self.deployment_type = deployment_type self.host_name = host_name self.port = port @@ -8438,7 +8430,7 @@ def __init__( **kwargs ): super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Eloqua' + self.type = 'Eloqua' # type: str self.endpoint = endpoint self.username = username self.password = password @@ -8512,7 +8504,7 @@ def __init__( **kwargs ): super(EloquaObjectDataset, 
self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'EloquaObject' + self.type = 'EloquaObject' # type: str self.table_name = table_name @@ -8627,7 +8619,7 @@ def __init__( **kwargs ): super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'ExecuteDataFlow' + self.type = 'ExecuteDataFlow' # type: str self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime @@ -8721,7 +8713,7 @@ def __init__( **kwargs ): super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'ExecutePipeline' + self.type = 'ExecutePipeline' # type: str self.pipeline = pipeline self.parameters = parameters self.wait_on_completion = wait_on_completion @@ -8835,7 +8827,7 @@ def __init__( **kwargs ): super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'ExecuteSSISPackage' + self.type = 'ExecuteSSISPackage' # type: str self.package_location = package_location self.runtime = runtime self.logging_level = logging_level @@ -8909,18 +8901,16 @@ def __init__( class Expression(msrest.serialization.Model): """Azure Synapse expression definition. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Expression type. Default value: "Expression". - :vartype type: str + :param type: Required. Expression type. Possible values include: "Expression". + :type type: str or ~azure.synapse.artifacts.models.ExpressionType :param value: Required. Expression value. 
:type value: str """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'value': {'required': True}, } @@ -8929,15 +8919,15 @@ class Expression(msrest.serialization.Model): 'value': {'key': 'value', 'type': 'str'}, } - type = "Expression" - def __init__( self, *, + type: Union[str, "ExpressionType"], value: str, **kwargs ): super(Expression, self).__init__(**kwargs) + self.type = type self.value = value @@ -9006,7 +8996,7 @@ def __init__( **kwargs ): super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'FileServer' + self.type = 'FileServer' # type: str self.host = host self.user_id = user_id self.password = password @@ -9068,7 +9058,7 @@ def __init__( **kwargs ): super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Filter' + self.type = 'Filter' # type: str self.items = items self.condition = condition @@ -9138,7 +9128,7 @@ def __init__( **kwargs ): super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'ForEach' + self.type = 'ForEach' # type: str self.is_sequential = is_sequential self.batch_count = batch_count self.items = items @@ -9231,7 +9221,7 @@ def __init__( **kwargs ): super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'FtpServer' + self.type = 'FtpServer' # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -9304,7 +9294,7 @@ def __init__( **kwargs ): super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'GetMetadata' + self.type = 'GetMetadata' # type: str self.dataset = dataset self.field_list = field_list @@ -9436,7 +9426,7 @@ def __init__( **kwargs ): super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleAdWords' + self.type = 'GoogleAdWords' # type: str self.client_customer_id = client_customer_id self.developer_token = developer_token self.authentication_type = authentication_type @@ -9514,7 +9504,7 @@ def __init__( **kwargs ): super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GoogleAdWordsObject' + self.type = 'GoogleAdWordsObject' # type: str self.table_name = table_name @@ -9627,7 +9617,7 @@ def __init__( **kwargs ): super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleBigQuery' + 
self.type = 'GoogleBigQuery' # type: str self.project = project self.additional_projects = additional_projects self.request_google_drive_scope = request_google_drive_scope @@ -9717,7 +9707,7 @@ def __init__( **kwargs ): super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GoogleBigQueryObject' + self.type = 'GoogleBigQueryObject' # type: str self.table_name = table_name self.table = table self.dataset = dataset @@ -9790,7 +9780,7 @@ def __init__( **kwargs ): super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleCloudStorage' + self.type = 'GoogleCloudStorage' # type: str self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url @@ -9856,7 +9846,7 @@ def __init__( **kwargs ): super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Greenplum' + self.type = 'Greenplum' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -9936,7 +9926,7 @@ def __init__( **kwargs ): super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GreenplumTable' + self.type = 'GreenplumTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -10042,7 +10032,7 @@ def __init__( **kwargs ): super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HBase' + self.type = 'HBase' # type: str self.host = host self.port = port self.http_path = http_path @@ -10120,7 +10110,7 @@ def __init__( **kwargs ): super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HBaseObject' + self.type = 'HBaseObject' # type: str self.table_name = table_name @@ -10194,7 +10184,7 @@ def __init__( **kwargs ): super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hdfs' + self.type = 'Hdfs' # type: str self.url = url self.authentication_type = authentication_type self.encrypted_credential = encrypted_credential @@ -10288,7 +10278,7 @@ def __init__( **kwargs ): super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightHive' 
+ self.type = 'HDInsightHive' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -10383,7 +10373,7 @@ def __init__( **kwargs ): super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HDInsight' + self.type = 'HDInsight' # type: str self.cluster_uri = cluster_uri self.user_name = user_name self.password = password @@ -10481,7 +10471,7 @@ def __init__( **kwargs ): super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightMapReduce' + self.type = 'HDInsightMapReduce' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -10709,7 +10699,7 @@ def __init__( **kwargs ): super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HDInsightOnDemand' + self.type = 'HDInsightOnDemand' # type: str self.cluster_size = cluster_size self.time_to_live = time_to_live self.version = version @@ -10823,7 +10813,7 @@ def __init__( **kwargs ): super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightPig' + self.type = 'HDInsightPig' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -10923,7 +10913,7 @@ def __init__( **kwargs ): super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightSpark' + self.type = 'HDInsightSpark' # type: str self.root_path = root_path self.entry_file_path = entry_file_path self.arguments = arguments @@ -11042,7 +11032,7 @@ def __init__( **kwargs ): super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightStreaming' + self.type = 'HDInsightStreaming' # type: str self.storage_linked_services = storage_linked_services self.arguments = arguments self.get_debug_info = get_debug_info @@ -11188,7 +11178,7 @@ def __init__( **kwargs ): super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hive' + self.type = 'Hive' # type: str self.host = host self.port = port self.server_type = server_type @@ -11282,7 +11272,7 @@ def __init__( **kwargs ): super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, 
linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HiveObject' + self.type = 'HiveObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -11378,7 +11368,7 @@ def __init__( **kwargs ): super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HttpServer' + self.type = 'HttpServer' # type: str self.url = url self.authentication_type = authentication_type self.user_name = user_name @@ -11474,7 +11464,7 @@ def __init__( **kwargs ): super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hubspot' + self.type = 'Hubspot' # type: str self.client_id = client_id self.client_secret = client_secret self.access_token = access_token @@ -11549,7 +11539,7 @@ def __init__( **kwargs ): super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HubspotObject' + self.type = 'HubspotObject' # type: str self.table_name = table_name @@ -11615,7 +11605,7 @@ def __init__( **kwargs ): super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'IfCondition' + self.type = 'IfCondition' # type: str self.expression = expression self.if_true_activities = if_true_activities self.if_false_activities = if_false_activities @@ -11723,7 +11713,7 @@ def __init__( **kwargs ): super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Impala' + self.type = 'Impala' # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -11812,7 +11802,7 @@ def __init__( **kwargs ): super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ImpalaObject' + self.type = 'ImpalaObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -11895,7 +11885,7 @@ def __init__( **kwargs ): super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Informix' + self.type = 'Informix' # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -11969,20 +11959,18 @@ def __init__( **kwargs ): super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, 
annotations=annotations, folder=folder, **kwargs) - self.type: str = 'InformixTable' + self.type = 'InformixTable' # type: str self.table_name = table_name class IntegrationRuntimeReference(msrest.serialization.Model): """Integration runtime reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of integration runtime. Default value: + :param type: Required. Type of integration runtime. Possible values include: "IntegrationRuntimeReference". - :vartype type: str + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType :param reference_name: Required. Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. @@ -11990,7 +11978,7 @@ class IntegrationRuntimeReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -12000,16 +11988,16 @@ class IntegrationRuntimeReference(msrest.serialization.Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "IntegrationRuntimeReference" - def __init__( self, *, + type: Union[str, "IntegrationRuntimeReferenceType"], reference_name: str, parameters: Optional[Dict[str, object]] = None, **kwargs ): super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.type = type self.reference_name = reference_name self.parameters = parameters @@ -12101,7 +12089,7 @@ def __init__( **kwargs ): super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Jira' + self.type = 'Jira' # type: str self.host = host self.port = port self.username = username @@ -12176,7 +12164,7 @@ def __init__( **kwargs ): super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'JiraObject' + self.type = 'JiraObject' # type: str self.table_name = table_name @@ -12256,7 +12244,7 @@ def __init__( **kwargs ): super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Json' + self.type = 'Json' # type: str self.location = location self.encoding_name = encoding_name self.compression = compression @@ -12328,12 +12316,11 @@ def __init__( class LinkedServiceReference(msrest.serialization.Model): """Linked service reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str + :param type: Required. Linked service reference type. Possible values include: + "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.Type :param reference_name: Required. Reference LinkedService name. :type reference_name: str :param parameters: Arguments for LinkedService. 
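# --- Illustrative sketch (editorial aside, not part of the generated diff) ---
# The hunks above change IntegrationRuntimeReference so that "type" is a regular
# required constructor argument instead of a class-level constant. A caller would
# now construct the reference roughly as below; the reference name and parameter
# values are hypothetical, and the plain string form of "type" is used since the
# signature accepts Union[str, IntegrationRuntimeReferenceType].
from azure.synapse.artifacts.models import IntegrationRuntimeReference

ir_ref = IntegrationRuntimeReference(
    type="IntegrationRuntimeReference",            # only documented value; an enum alias also exists
    reference_name="AutoResolveIntegrationRuntime",  # hypothetical integration runtime name
    parameters={"nodeCount": 4},                     # optional free-form arguments
)
# --- end sketch ---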
@@ -12341,7 +12328,7 @@ class LinkedServiceReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -12351,16 +12338,16 @@ class LinkedServiceReference(msrest.serialization.Model): 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "LinkedServiceReference" - def __init__( self, *, + type: Union[str, "Type"], reference_name: str, parameters: Optional[Dict[str, object]] = None, **kwargs ): super(LinkedServiceReference, self).__init__(**kwargs) + self.type = type self.reference_name = reference_name self.parameters = parameters @@ -12517,7 +12504,7 @@ def __init__( **kwargs ): super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Lookup' + self.type = 'Lookup' # type: str self.source = source self.dataset = dataset self.first_row_only = first_row_only @@ -12598,7 +12585,7 @@ def __init__( **kwargs ): super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Magento' + self.type = 'Magento' # type: str self.host = host self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -12671,7 +12658,7 @@ def __init__( **kwargs ): super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MagentoObject' + self.type = 'MagentoObject' # type: str self.table_name = table_name @@ -12727,7 +12714,7 @@ def __init__( **kwargs ): super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MappingDataFlow' + self.type = 'MappingDataFlow' # type: str self.sources = sources self.sinks = sinks self.transformations = transformations @@ -12793,7 +12780,7 @@ def __init__( **kwargs ): super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MariaDB' + self.type = 'MariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -12863,7 +12850,7 @@ def __init__( **kwargs ): super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MariaDBTable' + self.type = 'MariaDBTable' # type: str self.table_name = table_name @@ -12947,7 +12934,7 @@ def __init__( **kwargs ): super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Marketo' + self.type = 'Marketo' # type: str self.endpoint = endpoint self.client_id = client_id self.client_secret = client_secret @@ -13021,7 +13008,7 @@ def __init__( **kwargs ): 
super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MarketoObject' + self.type = 'MarketoObject' # type: str self.table_name = table_name @@ -13102,7 +13089,7 @@ def __init__( **kwargs ): super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MicrosoftAccess' + self.type = 'MicrosoftAccess' # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -13176,7 +13163,7 @@ def __init__( **kwargs ): super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MicrosoftAccessTable' + self.type = 'MicrosoftAccessTable' # type: str self.table_name = table_name @@ -13246,7 +13233,7 @@ def __init__( **kwargs ): super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MongoDbCollection' + self.type = 'MongoDbCollection' # type: str self.collection_name = collection_name @@ -13346,7 +13333,7 @@ def __init__( **kwargs ): super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MongoDb' + self.type = 'MongoDb' # type: str self.server = server self.authentication_type = authentication_type self.database_name = database_name @@ -13425,7 +13412,7 @@ def __init__( **kwargs ): super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MongoDbV2Collection' + self.type = 'MongoDbV2Collection' # type: str self.collection = collection @@ -13485,7 +13472,7 @@ def __init__( **kwargs ): super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MongoDbV2' + self.type = 'MongoDbV2' # type: str self.connection_string = connection_string self.database = database @@ -13541,7 +13528,7 @@ def __init__( ): super(Trigger, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'Trigger' + self.type = 'Trigger' # type: str self.description = description self.runtime_state = None self.annotations = annotations @@ -13594,7 +13581,7 @@ def __init__( **kwargs ): super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'MultiplePipelineTrigger' + self.type = 'MultiplePipelineTrigger' # type: str self.pipelines = pipelines @@ 
-13657,7 +13644,7 @@ def __init__( **kwargs ): super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MySql' + self.type = 'MySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -13727,7 +13714,7 @@ def __init__( **kwargs ): super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MySqlTable' + self.type = 'MySqlTable' # type: str self.table_name = table_name @@ -13790,7 +13777,7 @@ def __init__( **kwargs ): super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Netezza' + self.type = 'Netezza' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -13871,7 +13858,7 @@ def __init__( **kwargs ): super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'NetezzaTable' + self.type = 'NetezzaTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -14400,7 +14387,7 @@ def __init__( **kwargs ): super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'OData' + self.type = 'OData' # type: str self.url = url self.authentication_type = authentication_type self.user_name = user_name @@ -14479,7 +14466,7 @@ def __init__( **kwargs ): super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ODataResource' + self.type = 'ODataResource' # type: str self.path = path @@ -14559,7 +14546,7 @@ def __init__( **kwargs ): super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Odbc' + self.type = 'Odbc' # type: str self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential @@ -14632,7 +14619,7 @@ def __init__( **kwargs ): super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OdbcTable' + self.type = 'OdbcTable' # type: str self.table_name = table_name @@ -14707,7 +14694,7 @@ def __init__( **kwargs ): super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, 
structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Office365Table' + self.type = 'Office365Table' # type: str self.table_name = table_name self.predicate = predicate @@ -14785,7 +14772,7 @@ def __init__( **kwargs ): super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Office365' + self.type = 'Office365' # type: str self.office365_tenant_id = office365_tenant_id self.service_principal_tenant_id = service_principal_tenant_id self.service_principal_id = service_principal_id @@ -14853,7 +14840,7 @@ def __init__( **kwargs ): super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Oracle' + self.type = 'Oracle' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -14942,7 +14929,7 @@ def __init__( **kwargs ): super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'OracleServiceCloud' + self.type = 'OracleServiceCloud' # type: str self.host = host self.username = username self.password = password @@ -15016,7 +15003,7 @@ def __init__( **kwargs ): super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OracleServiceCloudObject' + self.type = 'OracleServiceCloudObject' # type: str self.table_name = table_name @@ -15095,7 +15082,7 @@ def __init__( **kwargs ): super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OracleTable' + self.type = 'OracleTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -15169,7 +15156,7 @@ def __init__( **kwargs ): super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Orc' + self.type = 'Orc' # type: str self.location = location self.orc_compression_codec = orc_compression_codec @@ -15275,7 +15262,7 @@ def __init__( **kwargs ): super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Parquet' + self.type = 'Parquet' # type: str self.location = location self.compression_codec = compression_codec @@ -15360,7 +15347,7 @@ def __init__( **kwargs ): super(PaypalLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Paypal' + self.type = 'Paypal' # type: str self.host = host self.client_id = client_id self.client_secret = client_secret @@ -15434,7 +15421,7 @@ def __init__( **kwargs ): super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PaypalObject' + self.type = 'PaypalObject' # type: str self.table_name = table_name @@ -15546,7 +15533,7 @@ def __init__( **kwargs ): super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Phoenix' + self.type = 'Phoenix' # type: str self.host = host self.port = port self.http_path = http_path @@ -15636,7 +15623,7 @@ def __init__( **kwargs ): super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PhoenixObject' + self.type = 'PhoenixObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -15698,12 +15685,10 @@ def __init__( class PipelineReference(msrest.serialization.Model): """Pipeline reference type. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". - :vartype type: str + :param type: Required. Pipeline reference type. Possible values include: "PipelineReference". + :type type: str or ~azure.synapse.artifacts.models.PipelineReferenceType :param reference_name: Required. Reference pipeline name. :type reference_name: str :param name: Reference name. 
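# --- Illustrative sketch (editorial aside, not part of the generated diff) ---
# PipelineReference gets the same treatment in the hunks around this point: its
# "type" moves from a class constant to a required constructor argument. A minimal
# construction, assuming the public import path and using a hypothetical pipeline
# name, would look like this:
from azure.synapse.artifacts.models import PipelineReference

pipeline_ref = PipelineReference(
    type="PipelineReference",            # only documented value for PipelineReferenceType
    reference_name="DailyCopyPipeline",  # hypothetical pipeline name
    name="daily-copy",                   # optional friendly name
)
# --- end sketch ---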
@@ -15711,7 +15696,7 @@ class PipelineReference(msrest.serialization.Model): """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } @@ -15721,16 +15706,16 @@ class PipelineReference(msrest.serialization.Model): 'name': {'key': 'name', 'type': 'str'}, } - type = "PipelineReference" - def __init__( self, *, + type: Union[str, "PipelineReferenceType"], reference_name: str, name: Optional[str] = None, **kwargs ): super(PipelineReference, self).__init__(**kwargs) + self.type = type self.reference_name = reference_name self.name = name @@ -16035,7 +16020,7 @@ def __init__( **kwargs ): super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'PostgreSql' + self.type = 'PostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential @@ -16115,7 +16100,7 @@ def __init__( **kwargs ): super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PostgreSqlTable' + self.type = 'PostgreSqlTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -16237,7 +16222,7 @@ def __init__( **kwargs ): super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Presto' + self.type = 'Presto' # type: str self.host = host self.server_version = server_version self.catalog = catalog @@ -16329,7 +16314,7 @@ def __init__( **kwargs ): super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PrestoObject' + self.type = 'PrestoObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -16447,7 +16432,7 @@ def __init__( **kwargs ): super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'QuickBooks' + self.type = 'QuickBooks' # type: str self.endpoint = endpoint self.company_id = company_id self.consumer_key = consumer_key @@ -16522,7 +16507,7 @@ def __init__( **kwargs ): super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'QuickBooksObject' + self.type = 'QuickBooksObject' # type: str self.table_name = table_name @@ -16633,7 +16618,7 @@ def __init__( **kwargs ): super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, 
annotations=annotations, folder=folder, **kwargs) - self.type: str = 'RelationalTable' + self.type = 'RelationalTable' # type: str self.table_name = table_name @@ -16782,7 +16767,7 @@ def __init__( **kwargs ): super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'RerunTumblingWindowTrigger' + self.type = 'RerunTumblingWindowTrigger' # type: str self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time @@ -16965,7 +16950,7 @@ def __init__( **kwargs ): super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Responsys' + self.type = 'Responsys' # type: str self.endpoint = endpoint self.client_id = client_id self.client_secret = client_secret @@ -17039,7 +17024,7 @@ def __init__( **kwargs ): super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ResponsysObject' + self.type = 'ResponsysObject' # type: str self.table_name = table_name @@ -17128,7 +17113,7 @@ def __init__( **kwargs ): super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'RestResource' + self.type = 'RestResource' # type: str self.relative_url = relative_url self.request_method = request_method self.request_body = request_body @@ -17231,7 +17216,7 @@ def __init__( **kwargs ): super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'RestService' + self.type = 'RestService' # type: str self.url = url self.enable_server_certificate_validation = enable_server_certificate_validation self.authentication_type = authentication_type @@ -17448,7 +17433,7 @@ def __init__( **kwargs ): super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Salesforce' + self.type = 'Salesforce' # type: str self.environment_url = environment_url self.username = username self.password = password @@ -17534,7 +17519,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SalesforceMarketingCloud' + self.type = 'SalesforceMarketingCloud' # type: str self.client_id = client_id self.client_secret = client_secret self.use_encrypted_endpoints = use_encrypted_endpoints @@ -17607,7 +17592,7 @@ def __init__( **kwargs ): super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, 
annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceMarketingCloudObject' + self.type = 'SalesforceMarketingCloudObject' # type: str self.table_name = table_name @@ -17676,7 +17661,7 @@ def __init__( **kwargs ): super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceObject' + self.type = 'SalesforceObject' # type: str self.object_api_name = object_api_name @@ -17755,7 +17740,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SalesforceServiceCloud' + self.type = 'SalesforceServiceCloud' # type: str self.environment_url = environment_url self.username = username self.password = password @@ -17829,7 +17814,7 @@ def __init__( **kwargs ): super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceServiceCloudObject' + self.type = 'SalesforceServiceCloudObject' # type: str self.object_api_name = object_api_name @@ -17893,7 +17878,7 @@ def __init__( **kwargs ): super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapBwCube' + self.type = 'SapBwCube' # type: str class SapBWLinkedService(LinkedService): @@ -17973,7 +17958,7 @@ def __init__( **kwargs ): super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapBW' + self.type = 'SapBW' # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -18048,7 +18033,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapCloudForCustomer' + self.type = 'SapCloudForCustomer' # type: str self.url = url self.username = username self.password = password @@ -18121,7 +18106,7 @@ def __init__( **kwargs ): super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapCloudForCustomerResource' + self.type = 'SapCloudForCustomerResource' # type: str self.path = path @@ -18191,7 +18176,7 @@ def __init__( **kwargs ): super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapEcc' + self.type = 'SapEcc' # type: str self.url = url self.username = 
username self.password = password @@ -18264,7 +18249,7 @@ def __init__( **kwargs ): super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapEccResource' + self.type = 'SapEccResource' # type: str self.path = path @@ -18343,7 +18328,7 @@ def __init__( **kwargs ): super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapHana' + self.type = 'SapHana' # type: str self.connection_string = connection_string self.server = server self.authentication_type = authentication_type @@ -18421,7 +18406,7 @@ def __init__( **kwargs ): super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapHanaTable' + self.type = 'SapHanaTable' # type: str self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -18511,7 +18496,7 @@ def __init__( **kwargs ): super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapOpenHub' + self.type = 'SapOpenHub' # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -18598,7 +18583,7 @@ def __init__( **kwargs ): super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapOpenHubTable' + self.type = 'SapOpenHubTable' # type: str self.open_hub_destination_name = open_hub_destination_name self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id @@ -18730,7 +18715,7 @@ def __init__( **kwargs ): super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapTable' + self.type = 'SapTable' # type: str self.server = server self.system_number = system_number self.client_id = client_id @@ -18815,7 +18800,7 @@ def __init__( **kwargs ): super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapTableResource' + self.type = 'SapTableResource' # type: str self.table_name = table_name @@ -18892,7 +18877,7 @@ def __init__( **kwargs ): super(SecureString, self).__init__(**kwargs) - self.type: str = 'SecureString' + self.type = 'SecureString' # type: str self.value = value @@ -18992,7 +18977,7 @@ def __init__( **kwargs ): super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - 
self.type: str = 'ServiceNow' + self.type = 'ServiceNow' # type: str self.endpoint = endpoint self.authentication_type = authentication_type self.username = username @@ -19069,7 +19054,7 @@ def __init__( **kwargs ): super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ServiceNowObject' + self.type = 'ServiceNowObject' # type: str self.table_name = table_name @@ -19126,7 +19111,7 @@ def __init__( **kwargs ): super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'SetVariable' + self.type = 'SetVariable' # type: str self.variable_name = variable_name self.value = value @@ -19235,7 +19220,7 @@ def __init__( **kwargs ): super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Sftp' + self.type = 'Sftp' # type: str self.host = host self.port = port self.authentication_type = authentication_type @@ -19325,7 +19310,7 @@ def __init__( **kwargs ): super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Shopify' + self.type = 'Shopify' # type: str self.host = host self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -19398,7 +19383,7 @@ def __init__( **kwargs ): super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ShopifyObject' + self.type = 'ShopifyObject' # type: str self.table_name = table_name @@ -19923,7 +19908,7 @@ def __init__( **kwargs ): super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Spark' + self.type = 'Spark' # type: str self.host = host self.port = port self.server_type = server_type @@ -20014,7 +19999,7 @@ def __init__( **kwargs ): super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SparkObject' + self.type = 'SparkObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -20265,11 +20250,110 @@ def __init__( self.name = name +class SqlPoolReference(msrest.serialization.Model): + """SQL pool reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. SQL pool reference type. Possible values include: "SqlPoolReference". + :type type: str or ~azure.synapse.artifacts.models.SqlPoolReferenceType + :param reference_name: Required. Reference SQL pool name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "SqlPoolReferenceType"], + reference_name: str, + **kwargs + ): + super(SqlPoolReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class SqlPoolStoredProcedureActivity(Activity): + """Execute SQL pool stored procedure activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param sql_pool: Required. SQL pool stored procedure reference. + :type sql_pool: ~azure.synapse.artifacts.models.SqlPoolReference + :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with + resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'sql_pool': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'sql_pool': {'key': 'sqlPool', 'type': 'SqlPoolReference'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__( + self, + *, + name: str, + sql_pool: "SqlPoolReference", + stored_procedure_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + **kwargs + ): + super(SqlPoolStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'SqlPoolStoredProcedure' # type: str + self.sql_pool = sql_pool + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + + class SqlScript(msrest.serialization.Model): """SQL script. 
- Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -20277,14 +20361,13 @@ class SqlScript(msrest.serialization.Model): :type additional_properties: dict[str, object] :param description: The description of the SQL script. :type description: str - :ivar type: The type of the SQL script. Default value: "SqlQuery". - :vartype type: str + :param type: The type of the SQL script. Possible values include: "SqlQuery". + :type type: str or ~azure.synapse.artifacts.models.SqlScriptType :param content: Required. The content of the SQL script. :type content: ~azure.synapse.artifacts.models.SqlScriptContent """ _validation = { - 'type': {'constant': True}, 'content': {'required': True}, } @@ -20295,19 +20378,19 @@ class SqlScript(msrest.serialization.Model): 'content': {'key': 'content', 'type': 'SqlScriptContent'}, } - type = "SqlQuery" - def __init__( self, *, content: "SqlScriptContent", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, + type: Optional[Union[str, "SqlScriptType"]] = None, **kwargs ): super(SqlScript, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description + self.type = type self.content = content @@ -20524,7 +20607,7 @@ def __init__( **kwargs ): super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SqlServer' + self.type = 'SqlServer' # type: str self.connection_string = connection_string self.user_name = user_name self.password = password @@ -20596,7 +20679,7 @@ def __init__( **kwargs ): super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'SqlServerStoredProcedure' + self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -20676,7 +20759,7 @@ def __init__( **kwargs ): super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SqlServerTable' + self.type = 'SqlServerTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table @@ -20768,7 +20851,7 @@ def __init__( **kwargs ): super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Square' + self.type = 'Square' # type: str self.host = host self.client_id = client_id self.client_secret = client_secret @@ -20843,7 +20926,7 @@ def __init__( **kwargs ): super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, 
**kwargs) - self.type: str = 'SquareObject' + self.type = 'SquareObject' # type: str self.table_name = table_name @@ -21001,15 +21084,13 @@ def __init__( class SSISLogLocation(msrest.serialization.Model): """SSIS package execution log location. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File". - :vartype type: str + :param type: Required. The type of SSIS log location. Possible values include: "File". + :type type: str or ~azure.synapse.artifacts.models.SsisLogLocationType :param access_credential: The package execution log access credential. :type access_credential: ~azure.synapse.artifacts.models.SSISAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 @@ -21020,7 +21101,7 @@ class SSISLogLocation(msrest.serialization.Model): _validation = { 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, } _attribute_map = { @@ -21030,18 +21111,18 @@ class SSISLogLocation(msrest.serialization.Model): 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, } - type = "File" - def __init__( self, *, log_path: object, + type: Union[str, "SsisLogLocationType"], access_credential: Optional["SSISAccessCredential"] = None, log_refresh_interval: Optional[object] = None, **kwargs ): super(SSISLogLocation, self).__init__(**kwargs) self.log_path = log_path + self.type = type self.access_credential = access_credential self.log_refresh_interval = log_refresh_interval @@ -21388,7 +21469,7 @@ def __init__( **kwargs ): super(SwitchActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Switch' + self.type = 'Switch' # type: str self.on = on self.cases = cases self.default_activities = default_activities @@ -21500,7 +21581,7 @@ def __init__( **kwargs ): super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Sybase' + self.type = 'Sybase' # type: str self.server = server self.database = database self.schema = schema @@ -21574,10 +21655,191 @@ def __init__( **kwargs ): super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SybaseTable' + self.type = 'SybaseTable' # type: str self.table_name = table_name +class SynapseNotebookActivity(Activity): + """Execute Synapse notebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. 
+ :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param notebook: Required. Synapse notebook reference. + :type notebook: ~azure.synapse.artifacts.models.SynapseNotebookReference + :param parameters: Notebook parameters. + :type parameters: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'notebook': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'notebook': {'key': 'typeProperties.notebook', 'type': 'SynapseNotebookReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + name: str, + notebook: "SynapseNotebookReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(SynapseNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'SynapseNotebook' # type: str + self.notebook = notebook + self.parameters = parameters + + +class SynapseNotebookReference(msrest.serialization.Model): + """Synapse notebook reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Synapse notebook reference type. Possible values include: + "NotebookReference". + :type type: str or ~azure.synapse.artifacts.models.NotebookReferenceType + :param reference_name: Required. Reference notebook name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "NotebookReferenceType"], + reference_name: str, + **kwargs + ): + super(SynapseNotebookReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class SynapseSparkJobDefinitionActivity(Activity): + """Execute spark job activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param spark_job: Required. 
Synapse spark job reference. + :type spark_job: ~azure.synapse.artifacts.models.SynapseSparkJobReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'spark_job': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'spark_job': {'key': 'typeProperties.sparkJob', 'type': 'SynapseSparkJobReference'}, + } + + def __init__( + self, + *, + name: str, + spark_job: "SynapseSparkJobReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + **kwargs + ): + super(SynapseSparkJobDefinitionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'SparkJob' # type: str + self.spark_job = spark_job + + +class SynapseSparkJobReference(msrest.serialization.Model): + """Synapse spark job reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Synapse spark job reference type. Possible values include: + "SparkJobDefinitionReference". + :type type: str or ~azure.synapse.artifacts.models.SparkJobReferenceType + :param reference_name: Required. Reference spark job name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "SparkJobReferenceType"], + reference_name: str, + **kwargs + ): + super(SynapseSparkJobReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. 
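# --- Illustrative sketch (editorial aside, not part of the generated diff) ---
# The classes added above (SqlPoolStoredProcedureActivity, SynapseNotebookActivity,
# SynapseSparkJobDefinitionActivity and their *Reference companions) all follow the
# same pattern: a required reference object plus the usual activity metadata. A
# minimal construction of the two Synapse activities, with hypothetical notebook and
# spark job definition names, could look like this:
from azure.synapse.artifacts.models import (
    SynapseNotebookActivity,
    SynapseNotebookReference,
    SynapseSparkJobDefinitionActivity,
    SynapseSparkJobReference,
)

notebook_activity = SynapseNotebookActivity(
    name="RunPrepNotebook",
    notebook=SynapseNotebookReference(
        type="NotebookReference",        # only documented value for NotebookReferenceType
        reference_name="prep_notebook",  # hypothetical notebook name
    ),
    parameters={"run_date": "2020-09-01"},
)

spark_activity = SynapseSparkJobDefinitionActivity(
    name="RunSparkJob",
    spark_job=SynapseSparkJobReference(
        type="SparkJobDefinitionReference",  # only documented value for SparkJobReferenceType
        reference_name="wordcount_job",      # hypothetical spark job definition name
    ),
)
# --- end sketch ---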
@@ -21651,7 +21913,7 @@ def __init__( **kwargs ): super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Teradata' + self.type = 'Teradata' # type: str self.connection_string = connection_string self.server = server self.authentication_type = authentication_type @@ -21729,7 +21991,7 @@ def __init__( **kwargs ): super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'TeradataTable' + self.type = 'TeradataTable' # type: str self.database = database self.table = table @@ -22066,7 +22328,7 @@ def __init__( **kwargs ): super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Until' + self.type = 'Until' # type: str self.expression = expression self.timeout = timeout self.activities = activities @@ -22179,7 +22441,7 @@ def __init__( **kwargs ): super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Validation' + self.type = 'Validation' # type: str self.timeout = timeout self.sleep = sleep self.minimum_size = minimum_size @@ -22279,7 +22541,7 @@ def __init__( **kwargs ): super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Vertica' + self.type = 'Vertica' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential @@ -22360,7 +22622,7 @@ def __init__( **kwargs ): super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'VerticaTable' + self.type = 'VerticaTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -22416,7 +22678,7 @@ def __init__( **kwargs ): super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Wait' + self.type = 'Wait' # type: str self.wait_time_in_seconds = wait_time_in_seconds @@ -22512,7 +22774,7 @@ def __init__( **kwargs ): super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'WebActivity' + self.type = 'WebActivity' # type: str self.method = method self.url = url self.headers = headers @@ -22610,7 +22872,7 @@ def __init__( ): super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = url - self.authentication_type: Optional[str] = None + self.authentication_type = None # type: Optional[str] class 
WebAnonymousAuthentication(WebLinkedServiceTypeProperties): @@ -22644,7 +22906,7 @@ def __init__( **kwargs ): super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'Anonymous' + self.authentication_type = 'Anonymous' # type: str class WebBasicAuthentication(WebLinkedServiceTypeProperties): @@ -22689,7 +22951,7 @@ def __init__( **kwargs ): super(WebBasicAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'Basic' + self.authentication_type = 'Basic' # type: str self.username = username self.password = password @@ -22735,7 +22997,7 @@ def __init__( **kwargs ): super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'ClientCertificate' + self.authentication_type = 'ClientCertificate' # type: str self.pfx = pfx self.password = password @@ -22743,8 +23005,6 @@ def __init__( class WebHookActivity(Activity): """WebHook activity. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -22760,8 +23020,8 @@ class WebHookActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :ivar method: Required. Rest API method for target endpoint. Default value: "POST". - :vartype method: str + :param method: Required. Rest API method for target endpoint. Possible values include: "POST". + :type method: str or ~azure.synapse.artifacts.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). 
:type url: object @@ -22788,7 +23048,7 @@ class WebHookActivity(Activity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, + 'method': {'required': True}, 'url': {'required': True}, } @@ -22808,12 +23068,11 @@ class WebHookActivity(Activity): 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, } - method = "POST" - def __init__( self, *, name: str, + method: Union[str, "WebHookActivityMethod"], url: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, @@ -22827,7 +23086,8 @@ def __init__( **kwargs ): super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'WebHook' + self.type = 'WebHook' # type: str + self.method = method self.url = url self.timeout = timeout self.headers = headers @@ -22885,7 +23145,7 @@ def __init__( **kwargs ): super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Web' + self.type = 'Web' # type: str self.type_properties = type_properties @@ -22960,7 +23220,7 @@ def __init__( **kwargs ): super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'WebTable' + self.type = 'WebTable' # type: str self.index = index self.path = path @@ -23192,7 +23452,7 @@ def __init__( **kwargs ): super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Xero' + self.type = 'Xero' # type: str self.host = host self.consumer_key = consumer_key self.private_key = private_key @@ -23266,7 +23526,7 @@ def __init__( **kwargs ): super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'XeroObject' + self.type = 'XeroObject' # type: str self.table_name = table_name @@ -23345,7 +23605,7 @@ def __init__( **kwargs ): super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Zoho' + self.type = 'Zoho' # type: str self.endpoint = endpoint self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -23418,5 +23678,5 @@ def __init__( **kwargs ): super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ZohoObject' + self.type = 'ZohoObject' # type: str self.table_name = table_name diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py 
b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py index d8e978f6ea89..a185a33b87b5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py @@ -57,6 +57,7 @@ def _create_data_flow_debug_session_initial( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_data_flow_debug_session_initial.metadata['url'] # type: ignore @@ -72,13 +73,12 @@ def _create_data_flow_debug_session_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -175,11 +175,12 @@ def query_data_flow_debug_sessions_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -248,6 +249,7 @@ def add_data_flow( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -263,13 +265,12 @@ def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -306,6 +307,7 @@ def delete_data_flow_debug_session( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete_data_flow_debug_session.metadata['url'] # type: ignore @@ -321,12 +323,12 @@ def delete_data_flow_debug_session( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] 
= self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -351,6 +353,7 @@ def _execute_command_initial( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -366,13 +369,12 @@ def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py index 7b6e54e01416..83d0ef3b07f9 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py @@ -12,12 +12,14 @@ from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,38 +46,25 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config - def create_or_update_data_flow( + def _create_or_update_data_flow_initial( self, data_flow_name, # type: str properties, # type: "models.DataFlow" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.DataFlowResource" - """Creates or updates a data flow. - - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + # type: (...) -> Optional["models.DataFlowResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _data_flow = models.DataFlowResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_data_flow.metadata['url'] # type: ignore + url = self._create_or_update_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -91,28 +80,96 @@ def create_or_update_data_flow( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _create_or_update_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + def begin_create_or_update_data_flow( + self, + data_flow_name, # type: str + properties, # type: "models.DataFlow" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.DataFlowResource"] + """Creates or updates a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param properties: Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either DataFlowResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.DataFlowResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_data_flow_initial( + data_flow_name=data_flow_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore def get_data_flow( self, @@ -137,6 +194,7 @@ def get_data_flow( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_data_flow.metadata['url'] # type: ignore @@ -154,7 +212,7 @@ def get_data_flow( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -173,28 +231,20 @@ def get_data_flow( return deserialized get_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore - def delete_data_flow( + def _delete_data_flow_initial( self, data_flow_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a data flow. - - :param data_flow_name: The data flow name. 
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_data_flow.metadata['url'] # type: ignore + url = self._delete_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -207,12 +257,13 @@ def delete_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -220,7 +271,62 @@ def delete_data_flow( if cls: return cls(pipeline_response, None, {}) - delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _delete_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + def begin_delete_data_flow( + self, + data_flow_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_data_flow_initial( + data_flow_name=data_flow_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore def get_data_flows_by_workspace( self, @@ -238,11 +344,12 @@ def get_data_flows_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py index b8ea8c5b1874..47cab4d4fa17 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py @@ -12,12 +12,14 @@ from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. 
import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -60,11 +62,12 @@ def get_datasets_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -113,38 +116,25 @@ def get_next(next_link=None): ) get_datasets_by_workspace.metadata = {'url': '/datasets'} # type: ignore - def create_or_update_dataset( + def _create_or_update_dataset_initial( self, dataset_name, # type: str properties, # type: "models.Dataset" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.DatasetResource" - """Creates or updates a dataset. - - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + # type: (...) 
-> Optional["models.DatasetResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _dataset = models.DatasetResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_dataset.metadata['url'] # type: ignore + url = self._create_or_update_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -160,28 +150,96 @@ def create_or_update_dataset( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('DatasetResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _create_or_update_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + def begin_create_or_update_dataset( + self, + dataset_name, # type: str + properties, # type: "models.Dataset" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.DatasetResource"] + """Creates or updates a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :param properties: Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either DatasetResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.DatasetResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_dataset_initial( + dataset_name=dataset_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore def get_dataset( self, @@ -206,6 +264,7 @@ def get_dataset( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_dataset.metadata['url'] # type: ignore @@ -223,7 +282,7 @@ def get_dataset( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -244,28 +303,20 @@ def get_dataset( return deserialized get_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore - def delete_dataset( + def _delete_dataset_initial( self, dataset_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a dataset. - - :param dataset_name: The dataset name. 
- :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_dataset.metadata['url'] # type: ignore + url = self._delete_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -278,12 +329,13 @@ def delete_dataset( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -291,4 +343,59 @@ def delete_dataset( if cls: return cls(pipeline_response, None, {}) - delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _delete_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + def begin_delete_dataset( + self, + dataset_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_dataset_initial( + dataset_name=dataset_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py index 81745ded6b96..763f281bccaa 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py @@ -12,12 +12,14 @@ from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -60,11 +62,12 @@ def get_linked_services_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -113,38 +116,25 @@ def get_next(next_link=None): ) get_linked_services_by_workspace.metadata = {'url': '/linkedservices'} # type: ignore - def create_or_update_linked_service( + def _create_or_update_linked_service_initial( self, linked_service_name, # type: str properties, # type: "models.LinkedService" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.LinkedServiceResource" - """Creates or updates a linked service. - - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService - :param if_match: ETag of the linkedService entity. Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + # type: (...) -> Optional["models.LinkedServiceResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _linked_service = models.LinkedServiceResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_linked_service.metadata['url'] # type: ignore + url = self._create_or_update_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -160,28 +150,96 @@ def create_or_update_linked_service( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _create_or_update_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + def begin_create_or_update_linked_service( + self, + linked_service_name, # type: str + properties, # type: "models.LinkedService" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.LinkedServiceResource"] + """Creates or updates a linked service. 
+ + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param properties: Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either LinkedServiceResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.LinkedServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_linked_service_initial( + linked_service_name=linked_service_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore def get_linked_service( self, @@ -207,6 +265,7 @@ def get_linked_service( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_linked_service.metadata['url'] # type: ignore @@ -224,7 +283,7 @@ def get_linked_service( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -245,28 +304,20 @@ def get_linked_service( return deserialized get_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore - def 
delete_linked_service( + def _delete_linked_service_initial( self, linked_service_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a linked service. - - :param linked_service_name: The linked service name. - :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_linked_service.metadata['url'] # type: ignore + url = self._delete_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -279,12 +330,13 @@ def delete_linked_service( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -292,4 +344,59 @@ def delete_linked_service( if cls: return cls(pipeline_response, None, {}) - delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _delete_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + def begin_delete_linked_service( + self, + linked_service_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a linked service. + + :param linked_service_name: The linked service name. + :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_linked_service_initial( + linked_service_name=linked_service_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py index a9bfb3cb4812..1594c92e9249 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py @@ -12,12 +12,14 @@ from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. 
import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -60,11 +62,12 @@ def get_notebooks_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -129,11 +132,12 @@ def get_notebook_summary_by_work_space( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -182,38 +186,25 @@ def get_next(next_link=None): ) get_notebook_summary_by_work_space.metadata = {'url': '/notebooks/summary'} # type: ignore - def create_or_update_notebook( + def _create_or_update_notebook_initial( self, notebook_name, # type: str properties, # type: "models.Notebook" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.NotebookResource" - """Creates or updates a Note Book. - - :param notebook_name: The notebook name. - :type notebook_name: str - :param properties: Properties of Notebook. - :type properties: ~azure.synapse.artifacts.models.Notebook - :param if_match: ETag of the Note book entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.NotebookResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] + # type: (...) 
-> Optional["models.NotebookResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _notebook = models.NotebookResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_notebook.metadata['url'] # type: ignore + url = self._create_or_update_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -229,28 +220,96 @@ def create_or_update_notebook( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_notebook, 'NotebookResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('NotebookResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _create_or_update_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + def begin_create_or_update_notebook( + self, + notebook_name, # type: str + properties, # type: "models.Notebook" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.NotebookResource"] + """Creates or updates a Note Book. + + :param notebook_name: The notebook name. + :type notebook_name: str + :param properties: Properties of Notebook. + :type properties: ~azure.synapse.artifacts.models.Notebook + :param if_match: ETag of the Note book entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either NotebookResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.NotebookResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_notebook_initial( + notebook_name=notebook_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore def get_notebook( self, @@ -275,6 +334,7 @@ def get_notebook( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_notebook.metadata['url'] # type: ignore @@ -292,7 +352,7 @@ def get_notebook( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -313,28 +373,20 @@ def get_notebook( return deserialized get_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore - def delete_notebook( + def _delete_notebook_initial( self, notebook_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a Note book. - - :param notebook_name: The notebook name. 
- :type notebook_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_notebook.metadata['url'] # type: ignore + url = self._delete_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -347,12 +399,13 @@ def delete_notebook( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -360,4 +413,59 @@ def delete_notebook( if cls: return cls(pipeline_response, None, {}) - delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _delete_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + def begin_delete_notebook( + self, + notebook_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a Note book. + + :param notebook_name: The notebook name. + :type notebook_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_notebook_initial( + notebook_name=notebook_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py index 67076af5c3fa..0a0d5114de9a 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py @@ -12,12 +12,14 @@ from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -60,11 +62,12 @@ def get_pipelines_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -113,36 +116,23 @@ def get_next(next_link=None): ) get_pipelines_by_workspace.metadata = {'url': '/pipelines'} # type: ignore - def create_or_update_pipeline( + def _create_or_update_pipeline_initial( self, pipeline_name, # type: str pipeline, # type: "models.PipelineResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.PipelineResource" - """Creates or updates a pipeline. - - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. 
- :type pipeline: ~azure.synapse.artifacts.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + # type: (...) -> Optional["models.PipelineResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_pipeline.metadata['url'] # type: ignore + url = self._create_or_update_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -158,28 +148,96 @@ def create_or_update_pipeline( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('PipelineResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _create_or_update_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + def begin_create_or_update_pipeline( + self, + pipeline_name, # type: str + pipeline, # type: "models.PipelineResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.PipelineResource"] + """Creates or updates a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. + :type pipeline: ~azure.synapse.artifacts.models.PipelineResource + :param if_match: ETag of the pipeline entity. 
Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either PipelineResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.PipelineResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_pipeline_initial( + pipeline_name=pipeline_name, + pipeline=pipeline, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore def get_pipeline( self, @@ -204,6 +262,7 @@ def get_pipeline( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline.metadata['url'] # type: ignore @@ -221,7 +280,7 @@ def get_pipeline( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -242,28 +301,20 @@ def get_pipeline( return deserialized get_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore - def delete_pipeline( + def _delete_pipeline_initial( self, pipeline_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a pipeline. - - :param pipeline_name: The pipeline name. 
- :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_pipeline.metadata['url'] # type: ignore + url = self._delete_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -276,12 +327,13 @@ def delete_pipeline( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -289,7 +341,62 @@ def delete_pipeline( if cls: return cls(pipeline_response, None, {}) - delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _delete_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + def begin_delete_pipeline( + self, + pipeline_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_pipeline_initial( + pipeline_name=pipeline_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore def create_pipeline_run( self, @@ -327,6 +434,7 @@ def create_pipeline_run( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_pipeline_run.metadata['url'] # type: ignore @@ -349,7 +457,7 @@ def create_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -358,11 +466,10 @@ def create_pipeline_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py index e38dcb17fb15..b70337659f64 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py @@ -63,6 +63,7 @@ def query_pipeline_runs_by_workspace( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_pipeline_runs_by_workspace.metadata['url'] # type: ignore @@ -78,13 +79,12 @@ def query_pipeline_runs_by_workspace( # Construct 
headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -120,6 +120,7 @@ def get_pipeline_run( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline_run.metadata['url'] # type: ignore @@ -135,7 +136,7 @@ def get_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -180,6 +181,7 @@ def query_activity_runs( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_activity_runs.metadata['url'] # type: ignore @@ -197,13 +199,12 @@ def query_activity_runs( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -243,6 +244,7 @@ def cancel_pipeline_run( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.cancel_pipeline_run.metadata['url'] # type: ignore @@ -260,6 +262,7 @@ def cancel_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py index 8051a81d8afc..25d404cab281 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py @@ -62,11 +62,12 @@ def 
get_spark_job_definitions_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -144,6 +145,7 @@ def create_or_update_spark_job_definition( _spark_job_definition = models.SparkJobDefinitionResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_spark_job_definition.metadata['url'] # type: ignore @@ -162,13 +164,12 @@ def create_or_update_spark_job_definition( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -209,6 +210,7 @@ def get_spark_job_definition( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_spark_job_definition.metadata['url'] # type: ignore @@ -226,7 +228,7 @@ def get_spark_job_definition( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -266,6 +268,7 @@ def delete_spark_job_definition( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.delete_spark_job_definition.metadata['url'] # type: ignore @@ -281,6 +284,7 @@ def delete_spark_job_definition( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -306,6 +310,7 @@ def _execute_spark_job_definition_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._execute_spark_job_definition_initial.metadata['url'] # type: ignore @@ -321,7 +326,7 @@ def _execute_spark_job_definition_initial( # Construct 
headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -415,6 +420,7 @@ def _debug_spark_job_definition_initial( _spark_job_definition_azure_resource = models.SparkJobDefinitionResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._debug_spark_job_definition_initial.metadata['url'] # type: ignore @@ -430,13 +436,12 @@ def _debug_spark_job_definition_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition_azure_resource, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py index 78a44563ddd3..c13ee57ea579 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py @@ -60,11 +60,12 @@ def get_sql_scripts_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -142,6 +143,7 @@ def create_or_update_sql_script( _sql_script = models.SqlScriptResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_sql_script.metadata['url'] # type: ignore @@ -160,13 +162,12 @@ def create_or_update_sql_script( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_sql_script, 'SqlScriptResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -206,6 
+207,7 @@ def get_sql_script( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_sql_script.metadata['url'] # type: ignore @@ -223,7 +225,7 @@ def get_sql_script( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -263,6 +265,7 @@ def delete_sql_script( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.delete_sql_script.metadata['url'] # type: ignore @@ -278,6 +281,7 @@ def delete_sql_script( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py index b403f524caa4..4facb9084857 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py @@ -62,11 +62,12 @@ def get_triggers_by_workspace( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -115,38 +116,25 @@ def get_next(next_link=None): ) get_triggers_by_workspace.metadata = {'url': '/triggers'} # type: ignore - def create_or_update_trigger( + def _create_or_update_trigger_initial( self, trigger_name, # type: str properties, # type: "models.Trigger" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.TriggerResource" - """Creates or updates a trigger. - - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~azure.synapse.artifacts.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + # type: (...) 
-> Optional["models.TriggerResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) _trigger = models.TriggerResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_trigger.metadata['url'] # type: ignore + url = self._create_or_update_trigger_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -162,28 +150,96 @@ def create_or_update_trigger( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + _create_or_update_trigger_initial.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + + def begin_create_or_update_trigger( + self, + trigger_name, # type: str + properties, # type: "models.Trigger" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.TriggerResource"] + """Creates or updates a trigger. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param properties: Properties of the trigger. + :type properties: ~azure.synapse.artifacts.models.Trigger + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either TriggerResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.TriggerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_trigger_initial( + trigger_name=trigger_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore def get_trigger( self, @@ -208,6 +264,7 @@ def get_trigger( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_trigger.metadata['url'] # type: ignore @@ -225,7 +282,7 @@ def get_trigger( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -246,28 +303,20 @@ def get_trigger( return deserialized get_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore - def delete_trigger( + def _delete_trigger_initial( self, trigger_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a trigger. - - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_trigger.metadata['url'] # type: ignore + url = self._delete_trigger_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -280,12 +329,13 @@ def delete_trigger( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -293,7 +343,62 @@ def delete_trigger( if cls: return cls(pipeline_response, None, {}) - delete_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + _delete_trigger_initial.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + + def begin_delete_trigger( + self, + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a trigger. + + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_trigger_initial( + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore def _subscribe_trigger_to_events_initial( self, @@ -305,6 +410,7 @@ def _subscribe_trigger_to_events_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._subscribe_trigger_to_events_initial.metadata['url'] # type: ignore @@ -320,7 +426,7 @@ def _subscribe_trigger_to_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -418,6 +524,7 @@ def get_event_subscription_status( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -433,7 +540,7 @@ def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -462,6 +569,7 @@ def _unsubscribe_trigger_from_events_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._unsubscribe_trigger_from_events_initial.metadata['url'] # type: ignore @@ -477,7 +585,7 @@ def _unsubscribe_trigger_from_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = 
self._client._pipeline.run(request, stream=False, **kwargs) @@ -566,6 +674,7 @@ def _start_trigger_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._start_trigger_initial.metadata['url'] # type: ignore @@ -581,6 +690,7 @@ def _start_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -661,6 +771,7 @@ def _stop_trigger_initial( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._stop_trigger_initial.metadata['url'] # type: ignore @@ -676,6 +787,7 @@ def _stop_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py index 5d34234f1b10..1b9b836166ad 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py @@ -65,6 +65,7 @@ def rerun_trigger_instance( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.rerun_trigger_instance.metadata['url'] # type: ignore @@ -81,6 +82,7 @@ def rerun_trigger_instance( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -96,6 +98,61 @@ def rerun_trigger_instance( rerun_trigger_instance.metadata = {'url': '/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + def cancel_trigger_instance( + self, + trigger_name, # type: str + run_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Cancel single trigger instance by runId. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.cancel_trigger_instance.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.CloudError, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + cancel_trigger_instance.metadata = {'url': '/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + def query_trigger_runs_by_workspace( self, filter_parameters, # type: "models.RunFilterParameters" @@ -116,6 +173,7 @@ def query_trigger_runs_by_workspace( error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_trigger_runs_by_workspace.metadata['url'] # type: ignore @@ -131,13 +189,12 @@ def query_trigger_runs_by_workspace( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py index 225215d0675c..5b37860b9ba0 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py @@ -13,7 +13,7 @@ __all__ = ['SparkClient'] try: - from ._patch import patch_sdk + from ._patch import patch_sdk # type: ignore patch_sdk() except ImportError: pass diff 
--git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py index c846fd6d4cc4..574b3dcae0f8 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py @@ -59,8 +59,7 @@ def __init__( self.endpoint = endpoint self.spark_pool_name = spark_pool_name self.livy_api_version = livy_api_version - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -73,6 +72,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_metadata.json b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_metadata.json deleted file mode 100644 index 8eff44cc11b7..000000000000 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_metadata.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "chosen_version": "2019-11-01-preview", - "total_api_version_list": ["2019-11-01-preview"], - "client": { - "name": "SparkClient", - "filename": "_spark_client", - "description": "SparkClient." 
- }, - "global_parameters": { - "sync_method": { - "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - }, - "spark_pool_name": { - "method_signature": "spark_pool_name, # type: str", - "description": "Name of the spark pool.", - "docstring_type": "str", - "required": true - }, - "livy_api_version": { - "method_signature": "livy_api_version=\"2019-11-01-preview\", # type: str", - "description": "Valid api-version for the request.", - "docstring_type": "str", - "required": true - } - }, - "async_method": { - "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - }, - "spark_pool_name": { - "method_signature": "spark_pool_name, # type: str", - "description": "Name of the spark pool.", - "docstring_type": "str", - "required": true - }, - "livy_api_version": { - "method_signature": "livy_api_version=\"2019-11-01-preview\", # type: str", - "description": "Valid api-version for the request.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, endpoint, spark_pool_name, livy_api_version" - }, - "config": { - "credential": true, - "credential_scopes": ["https://dev.azuresynapse.net/.default"] - }, - "operation_groups": { - "spark_batch": "SparkBatchOperations", - "spark_session": "SparkSessionOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" -} \ No newline at end of file diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py index 7b3664b1ce60..5d4e57c46569 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py @@ -38,7 +38,6 @@ class SparkClient(object): :type spark_pool_name: str :param livy_api_version: Valid api-version for the request. :type livy_api_version: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py index 035146e99a22..eae7c95b6fbd 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "0.2.0" +VERSION = "0.1.0" diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py index a4411e9376f3..04ee8cc75d0e 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._spark_client_async import SparkClient +from ._spark_client import SparkClient __all__ = ['SparkClient'] diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py similarity index 94% rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration_async.py rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py index 25bc033d8161..3d748483e56f 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration_async.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py @@ -56,8 +56,7 @@ def __init__( self.endpoint = endpoint self.spark_pool_name = spark_pool_name self.livy_api_version = livy_api_version - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -69,6 +68,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py similarity index 84% rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client_async.py rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py index 7ba873de289f..1bbd03a040ce 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client_async.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py @@ -15,9 +15,9 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration_async import SparkClientConfiguration -from .operations_async import SparkBatchOperations -from .operations_async import SparkSessionOperations +from ._configuration import SparkClientConfiguration +from .operations import SparkBatchOperations +from .operations import SparkSessionOperations from .. 
import models @@ -25,9 +25,9 @@ class SparkClient(object): """SparkClient. :ivar spark_batch: SparkBatchOperations operations - :vartype spark_batch: azure.synapse.spark.aio.operations_async.SparkBatchOperations + :vartype spark_batch: azure.synapse.spark.aio.operations.SparkBatchOperations :ivar spark_session: SparkSessionOperations operations - :vartype spark_session: azure.synapse.spark.aio.operations_async.SparkSessionOperations + :vartype spark_session: azure.synapse.spark.aio.operations.SparkSessionOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. @@ -36,7 +36,6 @@ class SparkClient(object): :type spark_pool_name: str :param livy_api_version: Valid api-version for the request. :type livy_api_version: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/__init__.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/__init__.py similarity index 80% rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/__init__.py rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/__init__.py index 57c274c55c7b..cfceeb39e559 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/__init__.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/__init__.py @@ -6,8 +6,8 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._spark_batch_operations_async import SparkBatchOperations -from ._spark_session_operations_async import SparkSessionOperations +from ._spark_batch_operations import SparkBatchOperations +from ._spark_session_operations import SparkSessionOperations __all__ = [ 'SparkBatchOperations', diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py similarity index 97% rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py index 0fb70259d33a..f9f9c08c0792 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py @@ -64,6 +64,7 @@ async def get_spark_batch_jobs( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJobCollection"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_batch_jobs.metadata['url'] # type: ignore @@ -85,7 +86,7 @@ async def get_spark_batch_jobs( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) @@ -125,6 +126,7 @@ async def create_spark_batch_job( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_spark_batch_job.metadata['url'] # type: ignore @@ -143,13 +145,12 @@ async def create_spark_batch_job( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(spark_batch_job_options, 'SparkBatchJobOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -186,6 +187,7 @@ async def get_spark_batch_job( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_batch_job.metadata['url'] # type: ignore @@ -204,7 +206,7 @@ async def get_spark_batch_job( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py similarity index 96% rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py index 624bc2ae298f..e90a6e27876c 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py @@ -64,6 +64,7 @@ async def get_spark_sessions( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSessionCollection"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_sessions.metadata['url'] # type: ignore @@ -85,7 +86,7 @@ async def get_spark_sessions( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -125,6 +126,7 @@ async def create_spark_session( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) content_type = 
kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_spark_session.metadata['url'] # type: ignore @@ -143,13 +145,12 @@ async def create_spark_session( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(spark_session_options, 'SparkSessionOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -186,6 +187,7 @@ async def get_spark_session( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSession"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_session.metadata['url'] # type: ignore @@ -204,7 +206,7 @@ async def get_spark_session( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -333,6 +335,7 @@ async def get_spark_statements( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCollection"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_statements.metadata['url'] # type: ignore @@ -349,7 +352,7 @@ async def get_spark_statements( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -388,6 +391,7 @@ async def create_spark_statement( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_spark_statement.metadata['url'] # type: ignore @@ -405,13 +409,12 @@ async def create_spark_statement( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(spark_statement_options, 'SparkStatementOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -447,6 +450,7 @@ async def get_spark_statement( cls = 
kwargs.pop('cls', None) # type: ClsType["models.SparkStatement"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_statement.metadata['url'] # type: ignore @@ -464,7 +468,7 @@ async def get_spark_statement( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -502,6 +506,7 @@ async def cancel_spark_statement( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCancellationResult"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.cancel_spark_statement.metadata['url'] # type: ignore @@ -519,7 +524,7 @@ async def cancel_spark_statement( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py index bf5029789c38..609f0121140b 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py @@ -474,7 +474,7 @@ class SparkSession(msrest.serialization.Model): :type scheduler: ~azure.synapse.spark.models.SparkScheduler :param plugin: :type plugin: ~azure.synapse.spark.models.SparkServicePlugin - :param errors: + :param errors: The error information. :type errors: list[~azure.synapse.spark.models.SparkServiceError] :param tags: A set of tags. Dictionary of :code:``. :type tags: dict[str, str] @@ -672,7 +672,7 @@ class SparkSessionState(msrest.serialization.Model): :type dead_at: ~datetime.datetime :param shutting_down_at: :type shutting_down_at: ~datetime.datetime - :param terminated_at: + :param terminated_at: the time that at which "killed" livy state was first seen. :type terminated_at: ~datetime.datetime :param recovering_at: :type recovering_at: ~datetime.datetime diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py index 4e91597b1ee0..a6bbaea80bb6 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py @@ -561,7 +561,7 @@ class SparkSession(msrest.serialization.Model): :type scheduler: ~azure.synapse.spark.models.SparkScheduler :param plugin: :type plugin: ~azure.synapse.spark.models.SparkServicePlugin - :param errors: + :param errors: The error information. :type errors: list[~azure.synapse.spark.models.SparkServiceError] :param tags: A set of tags. Dictionary of :code:``. 
:type tags: dict[str, str] @@ -799,7 +799,7 @@ class SparkSessionState(msrest.serialization.Model): :type dead_at: ~datetime.datetime :param shutting_down_at: :type shutting_down_at: ~datetime.datetime - :param terminated_at: + :param terminated_at: the time that at which "killed" livy state was first seen. :type terminated_at: ~datetime.datetime :param recovering_at: :type recovering_at: ~datetime.datetime diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py index 209188ebcea3..19d776f2657c 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py @@ -6,57 +6,75 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum +from enum import Enum, EnumMeta +from six import with_metaclass -class PluginCurrentState(str, Enum): +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) - preparation = "Preparation" - resource_acquisition = "ResourceAcquisition" - queued = "Queued" - submission = "Submission" - monitoring = "Monitoring" - cleanup = "Cleanup" - ended = "Ended" + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) -class SchedulerCurrentState(str, Enum): - queued = "Queued" - scheduled = "Scheduled" - ended = "Ended" +class PluginCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): -class SparkBatchJobResultType(str, Enum): + PREPARATION = "Preparation" + RESOURCE_ACQUISITION = "ResourceAcquisition" + QUEUED = "Queued" + SUBMISSION = "Submission" + MONITORING = "Monitoring" + CLEANUP = "Cleanup" + ENDED = "Ended" + +class SchedulerCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + QUEUED = "Queued" + SCHEDULED = "Scheduled" + ENDED = "Ended" + +class SparkBatchJobResultType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The Spark batch job result. """ - uncertain = "Uncertain" - succeeded = "Succeeded" - failed = "Failed" - cancelled = "Cancelled" + UNCERTAIN = "Uncertain" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELLED = "Cancelled" -class SparkErrorSource(str, Enum): +class SparkErrorSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - system = "System" - user = "User" - unknown = "Unknown" - dependency = "Dependency" + SYSTEM = "System" + USER = "User" + UNKNOWN = "Unknown" + DEPENDENCY = "Dependency" -class SparkJobType(str, Enum): +class SparkJobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The job type. 
""" - spark_batch = "SparkBatch" - spark_session = "SparkSession" + SPARK_BATCH = "SparkBatch" + SPARK_SESSION = "SparkSession" -class SparkSessionResultType(str, Enum): +class SparkSessionResultType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - uncertain = "Uncertain" - succeeded = "Succeeded" - failed = "Failed" - cancelled = "Cancelled" + UNCERTAIN = "Uncertain" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELLED = "Cancelled" -class SparkStatementLanguageType(str, Enum): +class SparkStatementLanguageType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - spark = "spark" - pyspark = "pyspark" - dotnetspark = "dotnetspark" - sql = "sql" + SPARK = "spark" + PYSPARK = "pyspark" + DOTNETSPARK = "dotnetspark" + SQL = "sql" diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py index e1ecb3cfdc0a..786a73ea80bb 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py @@ -69,6 +69,7 @@ def get_spark_batch_jobs( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJobCollection"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_batch_jobs.metadata['url'] # type: ignore @@ -90,7 +91,7 @@ def get_spark_batch_jobs( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -131,6 +132,7 @@ def create_spark_batch_job( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_spark_batch_job.metadata['url'] # type: ignore @@ -149,13 +151,12 @@ def create_spark_batch_job( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(spark_batch_job_options, 'SparkBatchJobOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -193,6 +194,7 @@ def get_spark_batch_job( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_batch_job.metadata['url'] # type: ignore @@ -211,7 +213,7 @@ def get_spark_batch_job( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", 
accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py index 9304a83d0430..3f63be6d8756 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py @@ -69,6 +69,7 @@ def get_spark_sessions( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSessionCollection"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_sessions.metadata['url'] # type: ignore @@ -90,7 +91,7 @@ def get_spark_sessions( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -131,6 +132,7 @@ def create_spark_session( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_spark_session.metadata['url'] # type: ignore @@ -149,13 +151,12 @@ def create_spark_session( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(spark_session_options, 'SparkSessionOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -193,6 +194,7 @@ def get_spark_session( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSession"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_session.metadata['url'] # type: ignore @@ -211,7 +213,7 @@ def get_spark_session( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -343,6 +345,7 @@ def get_spark_statements( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCollection"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_statements.metadata['url'] # type: ignore @@ -359,7 +362,7 @@ def get_spark_statements( # Construct headers header_parameters = {} # 
type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -399,6 +402,7 @@ def create_spark_statement( error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_spark_statement.metadata['url'] # type: ignore @@ -416,13 +420,12 @@ def create_spark_statement( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(spark_statement_options, 'SparkStatementOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -459,6 +462,7 @@ def get_spark_statement( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatement"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.get_spark_statement.metadata['url'] # type: ignore @@ -476,7 +480,7 @@ def get_spark_statement( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -515,6 +519,7 @@ def cancel_spark_statement( cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCancellationResult"] error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" # Construct URL url = self.cancel_spark_statement.metadata['url'] # type: ignore @@ -532,7 +537,7 @@ def cancel_spark_statement( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) From ddc030c01d52786f62f0025d3c3246d40745fb70 Mon Sep 17 00:00:00 2001 From: Laurent Mazuel Date: Tue, 15 Sep 2020 09:44:42 -0700 Subject: [PATCH 2/4] 0.3.0 --- sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md | 4 ++++ .../azure/synapse/accesscontrol/_version.py | 2 +- sdk/synapse/azure-synapse-artifacts/CHANGELOG.md | 4 ++++ .../azure/synapse/artifacts/_version.py | 2 +- sdk/synapse/azure-synapse-spark/CHANGELOG.md | 4 ++++ .../azure-synapse-spark/azure/synapse/spark/_version.py | 2 +- 6 files changed, 15 insertions(+), 3 deletions(-) diff --git a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md index 
9fc398f95895..12add4c0aa50 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md @@ -1,5 +1,9 @@ # Release History +## 0.3.0 (2020-09-15) + +* Initial Release + ## 0.2.0 (2020-07-01) * Initial Release diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py index eae7c95b6fbd..92721eef7dd5 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.1.0" +VERSION = "0.3.0" diff --git a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md index 9fc398f95895..12add4c0aa50 100644 --- a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md @@ -1,5 +1,9 @@ # Release History +## 0.3.0 (2020-09-15) + +* Initial Release + ## 0.2.0 (2020-07-01) * Initial Release diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py index eae7c95b6fbd..92721eef7dd5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.1.0" +VERSION = "0.3.0" diff --git a/sdk/synapse/azure-synapse-spark/CHANGELOG.md b/sdk/synapse/azure-synapse-spark/CHANGELOG.md index 9fc398f95895..12add4c0aa50 100644 --- a/sdk/synapse/azure-synapse-spark/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-spark/CHANGELOG.md @@ -1,5 +1,9 @@ # Release History +## 0.3.0 (2020-09-15) + +* Initial Release + ## 0.2.0 (2020-07-01) * Initial Release diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py index eae7c95b6fbd..92721eef7dd5 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "0.1.0" +VERSION = "0.3.0" From 1ec3f4815aa8711d21fa7af1b26178666cf66936 Mon Sep 17 00:00:00 2001 From: Laurent Mazuel Date: Tue, 15 Sep 2020 09:58:27 -0700 Subject: [PATCH 3/4] ChangeLog --- sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md | 2 +- sdk/synapse/azure-synapse-artifacts/CHANGELOG.md | 4 +++- sdk/synapse/azure-synapse-spark/CHANGELOG.md | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md index 12add4c0aa50..bb80dbe7182f 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md @@ -2,7 +2,7 @@ ## 0.3.0 (2020-09-15) -* Initial Release +* Internal bugfixes (re-generated with latest generator) ## 0.2.0 (2020-07-01) diff --git a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md index 12add4c0aa50..4cff150170e7 100644 --- a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md @@ -2,7 +2,9 @@ ## 0.3.0 (2020-09-15) -* Initial Release +** Breaking changes ** + +- Migrated most long running operation to polling mechanism (operation now starts with `begin`) ## 0.2.0 (2020-07-01) diff --git a/sdk/synapse/azure-synapse-spark/CHANGELOG.md b/sdk/synapse/azure-synapse-spark/CHANGELOG.md index 12add4c0aa50..bb80dbe7182f 100644 --- a/sdk/synapse/azure-synapse-spark/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-spark/CHANGELOG.md @@ -2,7 +2,7 @@ ## 0.3.0 (2020-09-15) -* Initial Release +* Internal bugfixes (re-generated with latest generator) ## 0.2.0 (2020-07-01) From 551c6b1fd81aabc029a5e39ef78810c173b85a9c Mon Sep 17 00:00:00 2001 From: Laurent Mazuel Date: Wed, 16 Sep 2020 09:41:36 -0700 Subject: [PATCH 4/4] Update with latest autorest + latest Swagger 9/16 --- .../accesscontrol/_access_control_client.py | 1 + .../aio/_access_control_client.py | 1 + .../_access_control_client_operations.py | 30 +- .../_access_control_client_operations.py | 30 +- .../azure-synapse-artifacts/CHANGELOG.md | 7 + .../synapse/artifacts/_artifacts_client.py | 21 + .../artifacts/aio/_artifacts_client.py | 21 + .../artifacts/aio/operations/__init__.py | 8 + .../operations/_big_data_pools_operations.py | 146 + .../_data_flow_debug_session_operations.py | 22 +- .../aio/operations/_data_flow_operations.py | 18 +- .../aio/operations/_dataset_operations.py | 18 +- .../_integration_runtimes_operations.py | 146 + .../operations/_linked_service_operations.py | 18 +- .../aio/operations/_notebook_operations.py | 22 +- .../aio/operations/_pipeline_operations.py | 22 +- .../operations/_pipeline_run_operations.py | 18 +- .../_spark_job_definition_operations.py | 26 +- .../aio/operations/_sql_pools_operations.py | 146 + .../aio/operations/_sql_script_operations.py | 18 +- .../aio/operations/_trigger_operations.py | 38 +- .../aio/operations/_trigger_run_operations.py | 14 +- .../aio/operations/_workspace_operations.py | 91 + .../synapse/artifacts/models/__init__.py | 747 + .../models/_artifacts_client_enums.py | 252 + .../azure/synapse/artifacts/models/_models.py | 27224 +++++++++++----- .../synapse/artifacts/models/_models_py3.py | 26996 +++++++++++---- .../synapse/artifacts/operations/__init__.py | 8 + .../operations/_big_data_pools_operations.py | 152 + .../_data_flow_debug_session_operations.py | 22 +- .../operations/_data_flow_operations.py | 18 +- 
.../operations/_dataset_operations.py | 18 +- .../_integration_runtimes_operations.py | 152 + .../operations/_linked_service_operations.py | 18 +- .../operations/_notebook_operations.py | 22 +- .../operations/_pipeline_operations.py | 22 +- .../operations/_pipeline_run_operations.py | 18 +- .../_spark_job_definition_operations.py | 26 +- .../operations/_sql_pools_operations.py | 152 + .../operations/_sql_script_operations.py | 18 +- .../operations/_trigger_operations.py | 38 +- .../operations/_trigger_run_operations.py | 14 +- .../operations/_workspace_operations.py | 96 + .../azure/synapse/spark/_spark_client.py | 1 + .../azure/synapse/spark/aio/_spark_client.py | 1 + .../aio/operations/_spark_batch_operations.py | 18 +- .../operations/_spark_session_operations.py | 38 +- .../operations/_spark_batch_operations.py | 18 +- .../operations/_spark_session_operations.py | 38 +- 49 files changed, 41611 insertions(+), 15398 deletions(-) create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py create mode 100644 sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py index 1e9b6a7168a1..0a13403bfad0 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py @@ -44,6 +44,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py index e5a37cc4a523..922fb6e59303 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py @@ -41,6 +41,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py 
b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py index bf972febb9e6..733347bac53e 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -32,7 +32,9 @@ def get_role_definitions( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RolesListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -104,7 +106,9 @@ async def get_role_definition_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SynapseRole"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -157,7 +161,9 @@ async def create_role_assignment( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -220,7 +226,9 @@ async def get_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["models.RoleAssignmentDetails"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -280,7 +288,9 @@ async def get_role_assignment_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -333,7 +343,9 @@ async def delete_role_assignment_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -380,7 +392,9 @@ async def get_caller_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[str]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py index cb69534a9872..54b7048badf9 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -37,7 +37,9 @@ def get_role_definitions( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RolesListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -110,7 +112,9 @@ def get_role_definition_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SynapseRole"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -164,7 +168,9 @@ def create_role_assignment( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -228,7 +234,9 @@ def get_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["models.RoleAssignmentDetails"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -289,7 +297,9 @@ def get_role_assignment_by_id( :raises: 
~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -343,7 +353,9 @@ def delete_role_assignment_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" @@ -391,7 +403,9 @@ def get_caller_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[str]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md index 4cff150170e7..722b4dfbf02c 100644 --- a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md @@ -2,6 +2,13 @@ ## 0.3.0 (2020-09-15) +** Features ** + +- Add Workspace operations +- Add SqlPools operations +- Add BigDataPools operations +- Add IntegrationRuntimes operations + ** Breaking changes ** - Migrated most long running operation to polling mechanism (operation now starts with `begin`) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py index b38aadb1a7d6..cf31eab25040 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py @@ -29,6 +29,10 @@ from .operations import SqlScriptOperations from .operations import SparkJobDefinitionOperations from .operations import NotebookOperations +from .operations import WorkspaceOperations +from .operations import SqlPoolsOperations +from .operations import BigDataPoolsOperations +from .operations import IntegrationRuntimesOperations from . import models @@ -57,6 +61,14 @@ class ArtifactsClient(object): :vartype spark_job_definition: azure.synapse.artifacts.operations.SparkJobDefinitionOperations :ivar notebook: NotebookOperations operations :vartype notebook: azure.synapse.artifacts.operations.NotebookOperations + :ivar workspace: WorkspaceOperations operations + :vartype workspace: azure.synapse.artifacts.operations.WorkspaceOperations + :ivar sql_pools: SqlPoolsOperations operations + :vartype sql_pools: azure.synapse.artifacts.operations.SqlPoolsOperations + :ivar big_data_pools: BigDataPoolsOperations operations + :vartype big_data_pools: azure.synapse.artifacts.operations.BigDataPoolsOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: azure.synapse.artifacts.operations.IntegrationRuntimesOperations :param credential: Credential needed for the client to connect to Azure. 
:type credential: ~azure.core.credentials.TokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. @@ -77,6 +89,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.linked_service = LinkedServiceOperations( @@ -101,6 +114,14 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.notebook = NotebookOperations( self._client, self._config, self._serialize, self._deserialize) + self.workspace = WorkspaceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_pools = SqlPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.big_data_pools = BigDataPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtimes = IntegrationRuntimesOperations( + self._client, self._config, self._serialize, self._deserialize) def close(self): # type: () -> None diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py index 76dcfd962d41..43c8110c15a9 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py @@ -27,6 +27,10 @@ from .operations import SqlScriptOperations from .operations import SparkJobDefinitionOperations from .operations import NotebookOperations +from .operations import WorkspaceOperations +from .operations import SqlPoolsOperations +from .operations import BigDataPoolsOperations +from .operations import IntegrationRuntimesOperations from .. import models @@ -55,6 +59,14 @@ class ArtifactsClient(object): :vartype spark_job_definition: azure.synapse.artifacts.aio.operations.SparkJobDefinitionOperations :ivar notebook: NotebookOperations operations :vartype notebook: azure.synapse.artifacts.aio.operations.NotebookOperations + :ivar workspace: WorkspaceOperations operations + :vartype workspace: azure.synapse.artifacts.aio.operations.WorkspaceOperations + :ivar sql_pools: SqlPoolsOperations operations + :vartype sql_pools: azure.synapse.artifacts.aio.operations.SqlPoolsOperations + :ivar big_data_pools: BigDataPoolsOperations operations + :vartype big_data_pools: azure.synapse.artifacts.aio.operations.BigDataPoolsOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: azure.synapse.artifacts.aio.operations.IntegrationRuntimesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. 
@@ -74,6 +86,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.linked_service = LinkedServiceOperations( @@ -98,6 +111,14 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.notebook = NotebookOperations( self._client, self._config, self._serialize, self._deserialize) + self.workspace = WorkspaceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_pools = SqlPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.big_data_pools = BigDataPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtimes = IntegrationRuntimesOperations( + self._client, self._config, self._serialize, self._deserialize) async def close(self) -> None: await self._client.close() diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py index b1056111cbe8..f5afded7d4f5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py @@ -17,6 +17,10 @@ from ._sql_script_operations import SqlScriptOperations from ._spark_job_definition_operations import SparkJobDefinitionOperations from ._notebook_operations import NotebookOperations +from ._workspace_operations import WorkspaceOperations +from ._sql_pools_operations import SqlPoolsOperations +from ._big_data_pools_operations import BigDataPoolsOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations __all__ = [ 'LinkedServiceOperations', @@ -30,4 +34,8 @@ 'SqlScriptOperations', 'SparkJobDefinitionOperations', 'NotebookOperations', + 'WorkspaceOperations', + 'SqlPoolsOperations', + 'BigDataPoolsOperations', + 'IntegrationRuntimesOperations', ] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py new file mode 100644 index 000000000000..8762d45c2248 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BigDataPoolsOperations: + """BigDataPoolsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + **kwargs + ) -> "models.BigDataPoolResourceInfoListResult": + """List Big Data Pools. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/bigDataPools'} # type: ignore + + async def get( + self, + big_data_pool_name: str, + **kwargs + ) -> "models.BigDataPoolResourceInfo": + """Get Big Data Pool. + + :param big_data_pool_name: The Big Data Pool name. 
+ :type big_data_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfo, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfo + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfo"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'bigDataPoolName': self._serialize.url("big_data_pool_name", big_data_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/bigDataPools/{bigDataPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py index 43601f7bb532..cba447f9f2cb 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -48,7 +48,9 @@ async def _create_data_flow_debug_session_initial( **kwargs ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" 
content_type = kwargs.pop("content_type", "application/json") @@ -165,7 +167,9 @@ def query_data_flow_debug_sessions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -237,7 +241,9 @@ async def add_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -294,7 +300,9 @@ async def delete_data_flow_debug_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -339,7 +347,9 @@ async def _execute_command_initial( **kwargs ) -> Optional["models.DataFlowDebugCommandResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py index 0a58c942e073..52a86db75ccd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -50,7 +50,9 @@ async def _create_or_update_data_flow_initial( **kwargs ) -> Optional["models.DataFlowResource"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _data_flow = 
models.DataFlowResource(properties=properties) @@ -184,7 +186,9 @@ async def get_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -230,7 +234,9 @@ async def _delete_data_flow_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -331,7 +337,9 @@ def get_data_flows_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py index 589d1d7eb382..a0981e4688da 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,7 +54,9 @@ def get_datasets_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -119,7 +121,9 @@ async def _create_or_update_dataset_initial( **kwargs ) -> Optional["models.DatasetResource"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _dataset = models.DatasetResource(properties=properties) @@ -253,7 +257,9 @@ async def get_dataset( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: 
ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -301,7 +307,9 @@ async def _delete_dataset_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py new file mode 100644 index 000000000000..aed2b9319436 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimesOperations: + """IntegrationRuntimesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + **kwargs + ) -> "models.IntegrationRuntimeListResponse": + """List Integration Runtimes. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeListResponse, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeListResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/integrationRuntimes'} # type: ignore + + async def get( + self, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeResource": + """Get Integration Runtime. + + :param integration_runtime_name: The Integration Runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/integrationRuntimes/{integrationRuntimeName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py index 53350a408c0a..7bd876f6351f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,7 +54,9 @@ def get_linked_services_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ 
-119,7 +121,9 @@ async def _create_or_update_linked_service_initial( **kwargs ) -> Optional["models.LinkedServiceResource"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _linked_service = models.LinkedServiceResource(properties=properties) @@ -254,7 +258,9 @@ async def get_linked_service( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -302,7 +308,9 @@ async def _delete_linked_service_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py index 831087bc47af..8635059dfee7 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,7 +54,9 @@ def get_notebooks_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -123,7 +125,9 @@ def get_notebook_summary_by_work_space( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -188,7 +192,9 @@ async def _create_or_update_notebook_initial( **kwargs ) -> Optional["models.NotebookResource"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] - 
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _notebook = models.NotebookResource(properties=properties) @@ -322,7 +328,9 @@ async def get_notebook( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -370,7 +378,9 @@ async def _delete_notebook_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py index f612490f6172..892d72270055 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,7 +54,9 @@ def get_pipelines_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -119,7 +121,9 @@ async def _create_or_update_pipeline_initial( **kwargs ) -> Optional["models.PipelineResource"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -251,7 +255,9 @@ async def get_pipeline( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) 
api_version = "2019-06-01-preview" accept = "application/json" @@ -299,7 +305,9 @@ async def _delete_pipeline_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -419,7 +427,9 @@ async def create_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py index 81e09bc3cd17..8651bf55c955 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -54,7 +54,9 @@ async def query_pipeline_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -111,7 +113,9 @@ async def get_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -170,7 +174,9 @@ async def query_activity_runs( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -233,7 +239,9 @@ async def cancel_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - 
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py index 3308316fb796..0f3bf8bffe85 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,7 +54,9 @@ def get_spark_job_definitions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -133,7 +135,9 @@ async def create_or_update_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition = models.SparkJobDefinitionResource(properties=properties) @@ -200,7 +204,9 @@ async def get_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SparkJobDefinitionResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -257,7 +263,9 @@ async def delete_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -298,7 +306,9 @@ async def _execute_spark_job_definition_initial( **kwargs ) -> "models.SparkBatchJob": cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -403,7 +413,9 @@ async def _debug_spark_job_definition_initial( **kwargs ) -> "models.SparkBatchJob": cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition_azure_resource = models.SparkJobDefinitionResource(properties=properties) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py new file mode 100644 index 000000000000..1c4f0feb7294 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SqlPoolsOperations: + """SqlPoolsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + **kwargs + ) -> "models.SqlPoolInfoListResult": + """List Sql Pools. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPoolInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPoolInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPoolInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPoolInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/sqlPools'} # type: ignore + + async def get( + self, + sql_pool_name: str, + **kwargs + ) -> "models.SqlPool": + """Get Sql Pool. + + :param sql_pool_name: The Sql Pool name. 
+ :type sql_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPool, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPool + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPool"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPool', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/sqlPools/{sqlPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py index 5d77468f8919..bba7e75e340e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -52,7 +52,9 @@ def get_sql_scripts_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -131,7 +133,9 @@ async def create_or_update_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptResource"] - error_map = {404: ResourceNotFoundError, 409: 
ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _sql_script = models.SqlScriptResource(properties=properties) @@ -197,7 +201,9 @@ async def get_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SqlScriptResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -254,7 +260,9 @@ async def delete_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py index d4ce019823ab..866aedc0a1aa 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,7 +54,9 @@ def get_triggers_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -119,7 +121,9 @@ async def _create_or_update_trigger_initial( **kwargs ) -> Optional["models.TriggerResource"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _trigger = models.TriggerResource(properties=properties) @@ -253,7 +257,9 @@ async def get_trigger( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ 
-301,7 +307,9 @@ async def _delete_trigger_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -396,7 +404,9 @@ async def _subscribe_trigger_to_events_initial( **kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -508,7 +518,9 @@ async def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -552,7 +564,9 @@ async def _unsubscribe_trigger_from_events_initial( **kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -655,7 +669,9 @@ async def _start_trigger_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -750,7 +766,9 @@ async def _stop_trigger_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py index c0b76647f6b7..64299e81536b 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, 
ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -57,7 +57,9 @@ async def rerun_trigger_instance( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -111,7 +113,9 @@ async def cancel_trigger_instance( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -162,7 +166,9 @@ async def query_trigger_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py new file mode 100644 index 000000000000..7704c4f36dc6 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceOperations: + """WorkspaceOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + **kwargs + ) -> "models.Workspace": + """Get Workspace. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/workspace'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py index 3a3a4dd220cd..1174fc376d89 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py @@ -15,21 +15,49 @@ from ._models_py3 import AddDataFlowToDebugSessionResponse from ._models_py3 import AmazonMWSLinkedService from ._models_py3 import AmazonMWSObjectDataset + from ._models_py3 import AmazonMWSSource from ._models_py3 import AmazonRedshiftLinkedService + from ._models_py3 import AmazonRedshiftSource from ._models_py3 import AmazonRedshiftTableDataset from ._models_py3 import AmazonS3LinkedService + from ._models_py3 import AmazonS3Location + from ._models_py3 import AmazonS3ReadSettings from ._models_py3 import AppendVariableActivity + from ._models_py3 import AutoPauseProperties + from ._models_py3 import AutoScaleProperties from ._models_py3 import AvroDataset + from ._models_py3 import AvroFormat + from ._models_py3 import AvroSink + from ._models_py3 import AvroSource + from ._models_py3 import AvroWriteSettings from ._models_py3 import AzureBatchLinkedService from ._models_py3 import AzureBlobFSLinkedService + from ._models_py3 import AzureBlobFSLocation + from ._models_py3 import AzureBlobFSReadSettings + from ._models_py3 import 
AzureBlobFSSink + from ._models_py3 import AzureBlobFSSource + from ._models_py3 import AzureBlobFSWriteSettings from ._models_py3 import AzureBlobStorageLinkedService + from ._models_py3 import AzureBlobStorageLocation + from ._models_py3 import AzureBlobStorageReadSettings + from ._models_py3 import AzureBlobStorageWriteSettings from ._models_py3 import AzureDataExplorerCommandActivity from ._models_py3 import AzureDataExplorerLinkedService + from ._models_py3 import AzureDataExplorerSink + from ._models_py3 import AzureDataExplorerSource from ._models_py3 import AzureDataExplorerTableDataset from ._models_py3 import AzureDataLakeAnalyticsLinkedService from ._models_py3 import AzureDataLakeStoreLinkedService + from ._models_py3 import AzureDataLakeStoreLocation + from ._models_py3 import AzureDataLakeStoreReadSettings + from ._models_py3 import AzureDataLakeStoreSink + from ._models_py3 import AzureDataLakeStoreSource + from ._models_py3 import AzureDataLakeStoreWriteSettings from ._models_py3 import AzureDatabricksLinkedService + from ._models_py3 import AzureEntityResource from ._models_py3 import AzureFileStorageLinkedService + from ._models_py3 import AzureFileStorageLocation + from ._models_py3 import AzureFileStorageReadSettings from ._models_py3 import AzureFunctionActivity from ._models_py3 import AzureFunctionLinkedService from ._models_py3 import AzureKeyVaultLinkedService @@ -41,40 +69,70 @@ from ._models_py3 import AzureMLUpdateResourceActivity from ._models_py3 import AzureMLWebServiceFile from ._models_py3 import AzureMariaDBLinkedService + from ._models_py3 import AzureMariaDBSource from ._models_py3 import AzureMariaDBTableDataset from ._models_py3 import AzureMySqlLinkedService + from ._models_py3 import AzureMySqlSink + from ._models_py3 import AzureMySqlSource from ._models_py3 import AzureMySqlTableDataset from ._models_py3 import AzurePostgreSqlLinkedService + from ._models_py3 import AzurePostgreSqlSink + from ._models_py3 import AzurePostgreSqlSource from ._models_py3 import AzurePostgreSqlTableDataset + from ._models_py3 import AzureQueueSink from ._models_py3 import AzureSearchIndexDataset + from ._models_py3 import AzureSearchIndexSink from ._models_py3 import AzureSearchLinkedService from ._models_py3 import AzureSqlDWLinkedService from ._models_py3 import AzureSqlDWTableDataset from ._models_py3 import AzureSqlDatabaseLinkedService from ._models_py3 import AzureSqlMILinkedService from ._models_py3 import AzureSqlMITableDataset + from ._models_py3 import AzureSqlSink + from ._models_py3 import AzureSqlSource from ._models_py3 import AzureSqlTableDataset from ._models_py3 import AzureStorageLinkedService from ._models_py3 import AzureTableDataset + from ._models_py3 import AzureTableSink + from ._models_py3 import AzureTableSource from ._models_py3 import AzureTableStorageLinkedService from ._models_py3 import BigDataPoolReference + from ._models_py3 import BigDataPoolResourceInfo + from ._models_py3 import BigDataPoolResourceInfoListResult from ._models_py3 import BinaryDataset + from ._models_py3 import BinarySink + from ._models_py3 import BinarySource + from ._models_py3 import BlobEventsTrigger + from ._models_py3 import BlobSink + from ._models_py3 import BlobSource + from ._models_py3 import BlobTrigger from ._models_py3 import CassandraLinkedService + from ._models_py3 import CassandraSource from ._models_py3 import CassandraTableDataset + from ._models_py3 import ChainingTrigger from ._models_py3 import CloudError from ._models_py3 import 
CommonDataServiceForAppsEntityDataset from ._models_py3 import CommonDataServiceForAppsLinkedService + from ._models_py3 import CommonDataServiceForAppsSink + from ._models_py3 import CommonDataServiceForAppsSource from ._models_py3 import ConcurLinkedService from ._models_py3 import ConcurObjectDataset + from ._models_py3 import ConcurSource from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity from ._models_py3 import CopySink from ._models_py3 import CopySource + from ._models_py3 import CopyTranslator from ._models_py3 import CosmosDbLinkedService from ._models_py3 import CosmosDbMongoDbApiCollectionDataset from ._models_py3 import CosmosDbMongoDbApiLinkedService + from ._models_py3 import CosmosDbMongoDbApiSink + from ._models_py3 import CosmosDbMongoDbApiSource from ._models_py3 import CosmosDbSqlApiCollectionDataset + from ._models_py3 import CosmosDbSqlApiSink + from ._models_py3 import CosmosDbSqlApiSource from ._models_py3 import CouchbaseLinkedService + from ._models_py3 import CouchbaseSource from ._models_py3 import CouchbaseTableDataset from ._models_py3 import CreateDataFlowDebugSessionRequest from ._models_py3 import CreateDataFlowDebugSessionResponse @@ -82,6 +140,10 @@ from ._models_py3 import CustomActivity from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataSourceLinkedService + from ._models_py3 import CustomDataset + from ._models_py3 import CustomSetupBase + from ._models_py3 import DWCopyCommandDefaultValue + from ._models_py3 import DWCopyCommandSettings from ._models_py3 import DataFlow from ._models_py3 import DataFlowDebugCommandRequest from ._models_py3 import DataFlowDebugCommandResponse @@ -102,12 +164,14 @@ from ._models_py3 import DataFlowSourceSetting from ._models_py3 import DataFlowStagingInfo from ._models_py3 import DataLakeAnalyticsUSQLActivity + from ._models_py3 import DataLakeStorageAccountDetails from ._models_py3 import DatabricksNotebookActivity from ._models_py3 import DatabricksSparkJarActivity from ._models_py3 import DatabricksSparkPythonActivity from ._models_py3 import Dataset from ._models_py3 import DatasetBZip2Compression from ._models_py3 import DatasetCompression + from ._models_py3 import DatasetDataElement from ._models_py3 import DatasetDebugResource from ._models_py3 import DatasetDeflateCompression from ._models_py3 import DatasetFolder @@ -116,23 +180,45 @@ from ._models_py3 import DatasetLocation from ._models_py3 import DatasetReference from ._models_py3 import DatasetResource + from ._models_py3 import DatasetSchemaDataElement + from ._models_py3 import DatasetStorageFormat from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService + from ._models_py3 import Db2Source from ._models_py3 import Db2TableDataset from ._models_py3 import DeleteActivity from ._models_py3 import DeleteDataFlowDebugSessionRequest from ._models_py3 import DelimitedTextDataset + from ._models_py3 import DelimitedTextReadSettings + from ._models_py3 import DelimitedTextSink + from ._models_py3 import DelimitedTextSource + from ._models_py3 import DelimitedTextWriteSettings + from ._models_py3 import DependencyReference + from ._models_py3 import DistcpSettings from ._models_py3 import DocumentDbCollectionDataset + from ._models_py3 import DocumentDbCollectionSink + from ._models_py3 import DocumentDbCollectionSource from ._models_py3 import DrillLinkedService + from ._models_py3 import DrillSource from ._models_py3 import DrillTableDataset from 
._models_py3 import DynamicsAXLinkedService from ._models_py3 import DynamicsAXResourceDataset + from ._models_py3 import DynamicsAXSource from ._models_py3 import DynamicsCrmEntityDataset from ._models_py3 import DynamicsCrmLinkedService + from ._models_py3 import DynamicsCrmSink + from ._models_py3 import DynamicsCrmSource from ._models_py3 import DynamicsEntityDataset from ._models_py3 import DynamicsLinkedService + from ._models_py3 import DynamicsSink + from ._models_py3 import DynamicsSource from ._models_py3 import EloquaLinkedService from ._models_py3 import EloquaObjectDataset + from ._models_py3 import EloquaSource + from ._models_py3 import EntityReference + from ._models_py3 import ErrorAdditionalInfo + from ._models_py3 import ErrorContract + from ._models_py3 import ErrorResponse from ._models_py3 import EvaluateDataFlowExpressionRequest from ._models_py3 import ExecuteDataFlowActivity from ._models_py3 import ExecuteDataFlowActivityTypePropertiesCompute @@ -143,20 +229,35 @@ from ._models_py3 import ExposureControlResponse from ._models_py3 import Expression from ._models_py3 import FileServerLinkedService + from ._models_py3 import FileServerLocation + from ._models_py3 import FileServerReadSettings + from ._models_py3 import FileServerWriteSettings + from ._models_py3 import FileSystemSink + from ._models_py3 import FileSystemSource from ._models_py3 import FilterActivity from ._models_py3 import ForEachActivity + from ._models_py3 import FormatReadSettings + from ._models_py3 import FormatWriteSettings + from ._models_py3 import FtpReadSettings from ._models_py3 import FtpServerLinkedService + from ._models_py3 import FtpServerLocation from ._models_py3 import GetMetadataActivity from ._models_py3 import GetSsisObjectMetadataRequest from ._models_py3 import GoogleAdWordsLinkedService from ._models_py3 import GoogleAdWordsObjectDataset + from ._models_py3 import GoogleAdWordsSource from ._models_py3 import GoogleBigQueryLinkedService from ._models_py3 import GoogleBigQueryObjectDataset + from ._models_py3 import GoogleBigQuerySource from ._models_py3 import GoogleCloudStorageLinkedService + from ._models_py3 import GoogleCloudStorageLocation + from ._models_py3 import GoogleCloudStorageReadSettings from ._models_py3 import GreenplumLinkedService + from ._models_py3 import GreenplumSource from ._models_py3 import GreenplumTableDataset from ._models_py3 import HBaseLinkedService from ._models_py3 import HBaseObjectDataset + from ._models_py3 import HBaseSource from ._models_py3 import HDInsightHiveActivity from ._models_py3 import HDInsightLinkedService from ._models_py3 import HDInsightMapReduceActivity @@ -165,20 +266,50 @@ from ._models_py3 import HDInsightSparkActivity from ._models_py3 import HDInsightStreamingActivity from ._models_py3 import HdfsLinkedService + from ._models_py3 import HdfsLocation + from ._models_py3 import HdfsReadSettings + from ._models_py3 import HdfsSource from ._models_py3 import HiveLinkedService from ._models_py3 import HiveObjectDataset + from ._models_py3 import HiveSource from ._models_py3 import HttpLinkedService + from ._models_py3 import HttpReadSettings + from ._models_py3 import HttpServerLocation + from ._models_py3 import HttpSource from ._models_py3 import HubspotLinkedService from ._models_py3 import HubspotObjectDataset + from ._models_py3 import HubspotSource from ._models_py3 import IfConditionActivity from ._models_py3 import ImpalaLinkedService from ._models_py3 import ImpalaObjectDataset + from ._models_py3 import 
ImpalaSource from ._models_py3 import InformixLinkedService + from ._models_py3 import InformixSink + from ._models_py3 import InformixSource from ._models_py3 import InformixTableDataset + from ._models_py3 import IntegrationRuntime + from ._models_py3 import IntegrationRuntimeComputeProperties + from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties + from ._models_py3 import IntegrationRuntimeDataFlowProperties + from ._models_py3 import IntegrationRuntimeDataProxyProperties + from ._models_py3 import IntegrationRuntimeListResponse from ._models_py3 import IntegrationRuntimeReference + from ._models_py3 import IntegrationRuntimeResource + from ._models_py3 import IntegrationRuntimeSsisCatalogInfo + from ._models_py3 import IntegrationRuntimeSsisProperties + from ._models_py3 import IntegrationRuntimeVNetProperties from ._models_py3 import JiraLinkedService from ._models_py3 import JiraObjectDataset + from ._models_py3 import JiraSource from ._models_py3 import JsonDataset + from ._models_py3 import JsonFormat + from ._models_py3 import JsonSink + from ._models_py3 import JsonSource + from ._models_py3 import JsonWriteSettings + from ._models_py3 import LibraryRequirements + from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization + from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization + from ._models_py3 import LinkedIntegrationRuntimeType from ._models_py3 import LinkedService from ._models_py3 import LinkedServiceDebugResource from ._models_py3 import LinkedServiceListResponse @@ -188,21 +319,34 @@ from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService from ._models_py3 import MagentoObjectDataset + from ._models_py3 import MagentoSource + from ._models_py3 import ManagedIdentity + from ._models_py3 import ManagedIntegrationRuntime from ._models_py3 import MappingDataFlow from ._models_py3 import MariaDBLinkedService + from ._models_py3 import MariaDBSource from ._models_py3 import MariaDBTableDataset from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset + from ._models_py3 import MarketoSource from ._models_py3 import MicrosoftAccessLinkedService + from ._models_py3 import MicrosoftAccessSink + from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset from ._models_py3 import MongoDbCollectionDataset + from ._models_py3 import MongoDbCursorMethodsProperties from ._models_py3 import MongoDbLinkedService + from ._models_py3 import MongoDbSource from ._models_py3 import MongoDbV2CollectionDataset from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger from ._models_py3 import MySqlLinkedService + from ._models_py3 import MySqlSource from ._models_py3 import MySqlTableDataset from ._models_py3 import NetezzaLinkedService + from ._models_py3 import NetezzaPartitionSettings + from ._models_py3 import NetezzaSource from ._models_py3 import NetezzaTableDataset from ._models_py3 import Notebook from ._models_py3 import NotebookCell @@ -215,21 +359,37 @@ from ._models_py3 import NotebookSessionProperties from ._models_py3 import ODataLinkedService from ._models_py3 import ODataResourceDataset + from ._models_py3 import ODataSource from ._models_py3 import OdbcLinkedService + from ._models_py3 import OdbcSink + from ._models_py3 import OdbcSource from ._models_py3 import OdbcTableDataset from ._models_py3 import Office365Dataset from ._models_py3 import 
Office365LinkedService + from ._models_py3 import Office365Source from ._models_py3 import OracleLinkedService + from ._models_py3 import OraclePartitionSettings from ._models_py3 import OracleServiceCloudLinkedService from ._models_py3 import OracleServiceCloudObjectDataset + from ._models_py3 import OracleServiceCloudSource + from ._models_py3 import OracleSink + from ._models_py3 import OracleSource from ._models_py3 import OracleTableDataset from ._models_py3 import OrcDataset + from ._models_py3 import OrcFormat + from ._models_py3 import OrcSink + from ._models_py3 import OrcSource from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset + from ._models_py3 import ParquetFormat + from ._models_py3 import ParquetSink + from ._models_py3 import ParquetSource from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset + from ._models_py3 import PaypalSource from ._models_py3 import PhoenixLinkedService from ._models_py3 import PhoenixObjectDataset + from ._models_py3 import PhoenixSource from ._models_py3 import PipelineFolder from ._models_py3 import PipelineListResponse from ._models_py3 import PipelineReference @@ -237,14 +397,26 @@ from ._models_py3 import PipelineRun from ._models_py3 import PipelineRunInvokedBy from ._models_py3 import PipelineRunsQueryResponse + from ._models_py3 import PolybaseSettings from ._models_py3 import PostgreSqlLinkedService + from ._models_py3 import PostgreSqlSource from ._models_py3 import PostgreSqlTableDataset from ._models_py3 import PrestoLinkedService from ._models_py3 import PrestoObjectDataset + from ._models_py3 import PrestoSource + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointConnection + from ._models_py3 import PrivateLinkServiceConnectionState + from ._models_py3 import ProxyResource from ._models_py3 import QueryDataFlowDebugSessionsResponse from ._models_py3 import QuickBooksLinkedService from ._models_py3 import QuickBooksObjectDataset + from ._models_py3 import QuickBooksSource + from ._models_py3 import RecurrenceSchedule + from ._models_py3 import RecurrenceScheduleOccurrence from ._models_py3 import RedirectIncompatibleRowSettings + from ._models_py3 import RedshiftUnloadSettings + from ._models_py3 import RelationalSource from ._models_py3 import RelationalTableDataset from ._models_py3 import RerunTriggerListResponse from ._models_py3 import RerunTriggerResource @@ -253,8 +425,11 @@ from ._models_py3 import Resource from ._models_py3 import ResponsysLinkedService from ._models_py3 import ResponsysObjectDataset + from ._models_py3 import ResponsysSource from ._models_py3 import RestResourceDataset from ._models_py3 import RestServiceLinkedService + from ._models_py3 import RestSource + from ._models_py3 import RetryPolicy from ._models_py3 import RunFilterParameters from ._models_py3 import RunQueryFilter from ._models_py3 import RunQueryOrderBy @@ -268,30 +443,54 @@ from ._models_py3 import SalesforceLinkedService from ._models_py3 import SalesforceMarketingCloudLinkedService from ._models_py3 import SalesforceMarketingCloudObjectDataset + from ._models_py3 import SalesforceMarketingCloudSource from ._models_py3 import SalesforceObjectDataset from ._models_py3 import SalesforceServiceCloudLinkedService from ._models_py3 import SalesforceServiceCloudObjectDataset + from ._models_py3 import SalesforceServiceCloudSink + from ._models_py3 import SalesforceServiceCloudSource + from ._models_py3 import SalesforceSink + from ._models_py3 
import SalesforceSource from ._models_py3 import SapBWLinkedService from ._models_py3 import SapBwCubeDataset + from ._models_py3 import SapBwSource from ._models_py3 import SapCloudForCustomerLinkedService from ._models_py3 import SapCloudForCustomerResourceDataset + from ._models_py3 import SapCloudForCustomerSink + from ._models_py3 import SapCloudForCustomerSource from ._models_py3 import SapEccLinkedService from ._models_py3 import SapEccResourceDataset + from ._models_py3 import SapEccSource from ._models_py3 import SapHanaLinkedService + from ._models_py3 import SapHanaPartitionSettings + from ._models_py3 import SapHanaSource from ._models_py3 import SapHanaTableDataset from ._models_py3 import SapOpenHubLinkedService + from ._models_py3 import SapOpenHubSource from ._models_py3 import SapOpenHubTableDataset from ._models_py3 import SapTableLinkedService + from ._models_py3 import SapTablePartitionSettings from ._models_py3 import SapTableResourceDataset + from ._models_py3 import SapTableSource + from ._models_py3 import ScheduleTrigger + from ._models_py3 import ScheduleTriggerRecurrence from ._models_py3 import ScriptAction from ._models_py3 import SecretBase from ._models_py3 import SecureString + from ._models_py3 import SelfDependencyTumblingWindowTriggerReference + from ._models_py3 import SelfHostedIntegrationRuntime from ._models_py3 import ServiceNowLinkedService from ._models_py3 import ServiceNowObjectDataset + from ._models_py3 import ServiceNowSource from ._models_py3 import SetVariableActivity + from ._models_py3 import SftpLocation + from ._models_py3 import SftpReadSettings from ._models_py3 import SftpServerLinkedService + from ._models_py3 import SftpWriteSettings from ._models_py3 import ShopifyLinkedService from ._models_py3 import ShopifyObjectDataset + from ._models_py3 import ShopifySource + from ._models_py3 import Sku from ._models_py3 import SparkBatchJob from ._models_py3 import SparkBatchJobState from ._models_py3 import SparkJobDefinition @@ -304,7 +503,14 @@ from ._models_py3 import SparkScheduler from ._models_py3 import SparkServiceError from ._models_py3 import SparkServicePlugin + from ._models_py3 import SparkSource from ._models_py3 import SqlConnection + from ._models_py3 import SqlDWSink + from ._models_py3 import SqlDWSource + from ._models_py3 import SqlMISink + from ._models_py3 import SqlMISource + from ._models_py3 import SqlPool + from ._models_py3 import SqlPoolInfoListResult from ._models_py3 import SqlPoolReference from ._models_py3 import SqlPoolStoredProcedureActivity from ._models_py3 import SqlScript @@ -313,42 +519,62 @@ from ._models_py3 import SqlScriptResource from ._models_py3 import SqlScriptsListResponse from ._models_py3 import SqlServerLinkedService + from ._models_py3 import SqlServerSink + from ._models_py3 import SqlServerSource from ._models_py3 import SqlServerStoredProcedureActivity from ._models_py3 import SqlServerTableDataset + from ._models_py3 import SqlSink + from ._models_py3 import SqlSource from ._models_py3 import SquareLinkedService from ._models_py3 import SquareObjectDataset + from ._models_py3 import SquareSource from ._models_py3 import SsisObjectMetadataStatusResponse from ._models_py3 import StagingSettings from ._models_py3 import StartDataFlowDebugSessionRequest from ._models_py3 import StartDataFlowDebugSessionResponse + from ._models_py3 import StoreReadSettings + from ._models_py3 import StoreWriteSettings from ._models_py3 import StoredProcedureParameter from ._models_py3 import SubResource 
from ._models_py3 import SubResourceDebugResource from ._models_py3 import SwitchActivity from ._models_py3 import SwitchCase from ._models_py3 import SybaseLinkedService + from ._models_py3 import SybaseSource from ._models_py3 import SybaseTableDataset from ._models_py3 import SynapseNotebookActivity from ._models_py3 import SynapseNotebookReference from ._models_py3 import SynapseSparkJobDefinitionActivity from ._models_py3 import SynapseSparkJobReference + from ._models_py3 import TabularSource + from ._models_py3 import TabularTranslator from ._models_py3 import TeradataLinkedService + from ._models_py3 import TeradataPartitionSettings + from ._models_py3 import TeradataSource from ._models_py3 import TeradataTableDataset + from ._models_py3 import TextFormat + from ._models_py3 import TrackedResource from ._models_py3 import Transformation from ._models_py3 import Trigger from ._models_py3 import TriggerDependencyProvisioningStatus + from ._models_py3 import TriggerDependencyReference from ._models_py3 import TriggerListResponse from ._models_py3 import TriggerPipelineReference + from ._models_py3 import TriggerReference from ._models_py3 import TriggerResource from ._models_py3 import TriggerRun from ._models_py3 import TriggerRunsQueryResponse from ._models_py3 import TriggerSubscriptionOperationStatus + from ._models_py3 import TumblingWindowTrigger + from ._models_py3 import TumblingWindowTriggerDependencyReference from ._models_py3 import UntilActivity from ._models_py3 import UserProperty from ._models_py3 import ValidationActivity from ._models_py3 import VariableSpecification from ._models_py3 import VerticaLinkedService + from ._models_py3 import VerticaSource from ._models_py3 import VerticaTableDataset + from ._models_py3 import VirtualNetworkProfile from ._models_py3 import WaitActivity from ._models_py3 import WebActivity from ._models_py3 import WebActivityAuthentication @@ -358,14 +584,17 @@ from ._models_py3 import WebHookActivity from ._models_py3 import WebLinkedService from ._models_py3 import WebLinkedServiceTypeProperties + from ._models_py3 import WebSource from ._models_py3 import WebTableDataset from ._models_py3 import Workspace from ._models_py3 import WorkspaceIdentity from ._models_py3 import WorkspaceUpdateParameters from ._models_py3 import XeroLinkedService from ._models_py3 import XeroObjectDataset + from ._models_py3 import XeroSource from ._models_py3 import ZohoLinkedService from ._models_py3 import ZohoObjectDataset + from ._models_py3 import ZohoSource except (SyntaxError, ImportError): from ._models import Activity # type: ignore from ._models import ActivityDependency # type: ignore @@ -375,21 +604,49 @@ from ._models import AddDataFlowToDebugSessionResponse # type: ignore from ._models import AmazonMWSLinkedService # type: ignore from ._models import AmazonMWSObjectDataset # type: ignore + from ._models import AmazonMWSSource # type: ignore from ._models import AmazonRedshiftLinkedService # type: ignore + from ._models import AmazonRedshiftSource # type: ignore from ._models import AmazonRedshiftTableDataset # type: ignore from ._models import AmazonS3LinkedService # type: ignore + from ._models import AmazonS3Location # type: ignore + from ._models import AmazonS3ReadSettings # type: ignore from ._models import AppendVariableActivity # type: ignore + from ._models import AutoPauseProperties # type: ignore + from ._models import AutoScaleProperties # type: ignore from ._models import AvroDataset # type: ignore + from ._models import AvroFormat 
# type: ignore + from ._models import AvroSink # type: ignore + from ._models import AvroSource # type: ignore + from ._models import AvroWriteSettings # type: ignore from ._models import AzureBatchLinkedService # type: ignore from ._models import AzureBlobFSLinkedService # type: ignore + from ._models import AzureBlobFSLocation # type: ignore + from ._models import AzureBlobFSReadSettings # type: ignore + from ._models import AzureBlobFSSink # type: ignore + from ._models import AzureBlobFSSource # type: ignore + from ._models import AzureBlobFSWriteSettings # type: ignore from ._models import AzureBlobStorageLinkedService # type: ignore + from ._models import AzureBlobStorageLocation # type: ignore + from ._models import AzureBlobStorageReadSettings # type: ignore + from ._models import AzureBlobStorageWriteSettings # type: ignore from ._models import AzureDataExplorerCommandActivity # type: ignore from ._models import AzureDataExplorerLinkedService # type: ignore + from ._models import AzureDataExplorerSink # type: ignore + from ._models import AzureDataExplorerSource # type: ignore from ._models import AzureDataExplorerTableDataset # type: ignore from ._models import AzureDataLakeAnalyticsLinkedService # type: ignore from ._models import AzureDataLakeStoreLinkedService # type: ignore + from ._models import AzureDataLakeStoreLocation # type: ignore + from ._models import AzureDataLakeStoreReadSettings # type: ignore + from ._models import AzureDataLakeStoreSink # type: ignore + from ._models import AzureDataLakeStoreSource # type: ignore + from ._models import AzureDataLakeStoreWriteSettings # type: ignore from ._models import AzureDatabricksLinkedService # type: ignore + from ._models import AzureEntityResource # type: ignore from ._models import AzureFileStorageLinkedService # type: ignore + from ._models import AzureFileStorageLocation # type: ignore + from ._models import AzureFileStorageReadSettings # type: ignore from ._models import AzureFunctionActivity # type: ignore from ._models import AzureFunctionLinkedService # type: ignore from ._models import AzureKeyVaultLinkedService # type: ignore @@ -401,40 +658,70 @@ from ._models import AzureMLUpdateResourceActivity # type: ignore from ._models import AzureMLWebServiceFile # type: ignore from ._models import AzureMariaDBLinkedService # type: ignore + from ._models import AzureMariaDBSource # type: ignore from ._models import AzureMariaDBTableDataset # type: ignore from ._models import AzureMySqlLinkedService # type: ignore + from ._models import AzureMySqlSink # type: ignore + from ._models import AzureMySqlSource # type: ignore from ._models import AzureMySqlTableDataset # type: ignore from ._models import AzurePostgreSqlLinkedService # type: ignore + from ._models import AzurePostgreSqlSink # type: ignore + from ._models import AzurePostgreSqlSource # type: ignore from ._models import AzurePostgreSqlTableDataset # type: ignore + from ._models import AzureQueueSink # type: ignore from ._models import AzureSearchIndexDataset # type: ignore + from ._models import AzureSearchIndexSink # type: ignore from ._models import AzureSearchLinkedService # type: ignore from ._models import AzureSqlDWLinkedService # type: ignore from ._models import AzureSqlDWTableDataset # type: ignore from ._models import AzureSqlDatabaseLinkedService # type: ignore from ._models import AzureSqlMILinkedService # type: ignore from ._models import AzureSqlMITableDataset # type: ignore + from ._models import AzureSqlSink # type: ignore + from ._models import 
AzureSqlSource # type: ignore from ._models import AzureSqlTableDataset # type: ignore from ._models import AzureStorageLinkedService # type: ignore from ._models import AzureTableDataset # type: ignore + from ._models import AzureTableSink # type: ignore + from ._models import AzureTableSource # type: ignore from ._models import AzureTableStorageLinkedService # type: ignore from ._models import BigDataPoolReference # type: ignore + from ._models import BigDataPoolResourceInfo # type: ignore + from ._models import BigDataPoolResourceInfoListResult # type: ignore from ._models import BinaryDataset # type: ignore + from ._models import BinarySink # type: ignore + from ._models import BinarySource # type: ignore + from ._models import BlobEventsTrigger # type: ignore + from ._models import BlobSink # type: ignore + from ._models import BlobSource # type: ignore + from ._models import BlobTrigger # type: ignore from ._models import CassandraLinkedService # type: ignore + from ._models import CassandraSource # type: ignore from ._models import CassandraTableDataset # type: ignore + from ._models import ChainingTrigger # type: ignore from ._models import CloudError # type: ignore from ._models import CommonDataServiceForAppsEntityDataset # type: ignore from ._models import CommonDataServiceForAppsLinkedService # type: ignore + from ._models import CommonDataServiceForAppsSink # type: ignore + from ._models import CommonDataServiceForAppsSource # type: ignore from ._models import ConcurLinkedService # type: ignore from ._models import ConcurObjectDataset # type: ignore + from ._models import ConcurSource # type: ignore from ._models import ControlActivity # type: ignore from ._models import CopyActivity # type: ignore from ._models import CopySink # type: ignore from ._models import CopySource # type: ignore + from ._models import CopyTranslator # type: ignore from ._models import CosmosDbLinkedService # type: ignore from ._models import CosmosDbMongoDbApiCollectionDataset # type: ignore from ._models import CosmosDbMongoDbApiLinkedService # type: ignore + from ._models import CosmosDbMongoDbApiSink # type: ignore + from ._models import CosmosDbMongoDbApiSource # type: ignore from ._models import CosmosDbSqlApiCollectionDataset # type: ignore + from ._models import CosmosDbSqlApiSink # type: ignore + from ._models import CosmosDbSqlApiSource # type: ignore from ._models import CouchbaseLinkedService # type: ignore + from ._models import CouchbaseSource # type: ignore from ._models import CouchbaseTableDataset # type: ignore from ._models import CreateDataFlowDebugSessionRequest # type: ignore from ._models import CreateDataFlowDebugSessionResponse # type: ignore @@ -442,6 +729,10 @@ from ._models import CustomActivity # type: ignore from ._models import CustomActivityReferenceObject # type: ignore from ._models import CustomDataSourceLinkedService # type: ignore + from ._models import CustomDataset # type: ignore + from ._models import CustomSetupBase # type: ignore + from ._models import DWCopyCommandDefaultValue # type: ignore + from ._models import DWCopyCommandSettings # type: ignore from ._models import DataFlow # type: ignore from ._models import DataFlowDebugCommandRequest # type: ignore from ._models import DataFlowDebugCommandResponse # type: ignore @@ -462,12 +753,14 @@ from ._models import DataFlowSourceSetting # type: ignore from ._models import DataFlowStagingInfo # type: ignore from ._models import DataLakeAnalyticsUSQLActivity # type: ignore + from ._models import 
DataLakeStorageAccountDetails # type: ignore from ._models import DatabricksNotebookActivity # type: ignore from ._models import DatabricksSparkJarActivity # type: ignore from ._models import DatabricksSparkPythonActivity # type: ignore from ._models import Dataset # type: ignore from ._models import DatasetBZip2Compression # type: ignore from ._models import DatasetCompression # type: ignore + from ._models import DatasetDataElement # type: ignore from ._models import DatasetDebugResource # type: ignore from ._models import DatasetDeflateCompression # type: ignore from ._models import DatasetFolder # type: ignore @@ -476,23 +769,45 @@ from ._models import DatasetLocation # type: ignore from ._models import DatasetReference # type: ignore from ._models import DatasetResource # type: ignore + from ._models import DatasetSchemaDataElement # type: ignore + from ._models import DatasetStorageFormat # type: ignore from ._models import DatasetZipDeflateCompression # type: ignore from ._models import Db2LinkedService # type: ignore + from ._models import Db2Source # type: ignore from ._models import Db2TableDataset # type: ignore from ._models import DeleteActivity # type: ignore from ._models import DeleteDataFlowDebugSessionRequest # type: ignore from ._models import DelimitedTextDataset # type: ignore + from ._models import DelimitedTextReadSettings # type: ignore + from ._models import DelimitedTextSink # type: ignore + from ._models import DelimitedTextSource # type: ignore + from ._models import DelimitedTextWriteSettings # type: ignore + from ._models import DependencyReference # type: ignore + from ._models import DistcpSettings # type: ignore from ._models import DocumentDbCollectionDataset # type: ignore + from ._models import DocumentDbCollectionSink # type: ignore + from ._models import DocumentDbCollectionSource # type: ignore from ._models import DrillLinkedService # type: ignore + from ._models import DrillSource # type: ignore from ._models import DrillTableDataset # type: ignore from ._models import DynamicsAXLinkedService # type: ignore from ._models import DynamicsAXResourceDataset # type: ignore + from ._models import DynamicsAXSource # type: ignore from ._models import DynamicsCrmEntityDataset # type: ignore from ._models import DynamicsCrmLinkedService # type: ignore + from ._models import DynamicsCrmSink # type: ignore + from ._models import DynamicsCrmSource # type: ignore from ._models import DynamicsEntityDataset # type: ignore from ._models import DynamicsLinkedService # type: ignore + from ._models import DynamicsSink # type: ignore + from ._models import DynamicsSource # type: ignore from ._models import EloquaLinkedService # type: ignore from ._models import EloquaObjectDataset # type: ignore + from ._models import EloquaSource # type: ignore + from ._models import EntityReference # type: ignore + from ._models import ErrorAdditionalInfo # type: ignore + from ._models import ErrorContract # type: ignore + from ._models import ErrorResponse # type: ignore from ._models import EvaluateDataFlowExpressionRequest # type: ignore from ._models import ExecuteDataFlowActivity # type: ignore from ._models import ExecuteDataFlowActivityTypePropertiesCompute # type: ignore @@ -503,20 +818,35 @@ from ._models import ExposureControlResponse # type: ignore from ._models import Expression # type: ignore from ._models import FileServerLinkedService # type: ignore + from ._models import FileServerLocation # type: ignore + from ._models import FileServerReadSettings # type: ignore + 
from ._models import FileServerWriteSettings # type: ignore + from ._models import FileSystemSink # type: ignore + from ._models import FileSystemSource # type: ignore from ._models import FilterActivity # type: ignore from ._models import ForEachActivity # type: ignore + from ._models import FormatReadSettings # type: ignore + from ._models import FormatWriteSettings # type: ignore + from ._models import FtpReadSettings # type: ignore from ._models import FtpServerLinkedService # type: ignore + from ._models import FtpServerLocation # type: ignore from ._models import GetMetadataActivity # type: ignore from ._models import GetSsisObjectMetadataRequest # type: ignore from ._models import GoogleAdWordsLinkedService # type: ignore from ._models import GoogleAdWordsObjectDataset # type: ignore + from ._models import GoogleAdWordsSource # type: ignore from ._models import GoogleBigQueryLinkedService # type: ignore from ._models import GoogleBigQueryObjectDataset # type: ignore + from ._models import GoogleBigQuerySource # type: ignore from ._models import GoogleCloudStorageLinkedService # type: ignore + from ._models import GoogleCloudStorageLocation # type: ignore + from ._models import GoogleCloudStorageReadSettings # type: ignore from ._models import GreenplumLinkedService # type: ignore + from ._models import GreenplumSource # type: ignore from ._models import GreenplumTableDataset # type: ignore from ._models import HBaseLinkedService # type: ignore from ._models import HBaseObjectDataset # type: ignore + from ._models import HBaseSource # type: ignore from ._models import HDInsightHiveActivity # type: ignore from ._models import HDInsightLinkedService # type: ignore from ._models import HDInsightMapReduceActivity # type: ignore @@ -525,20 +855,50 @@ from ._models import HDInsightSparkActivity # type: ignore from ._models import HDInsightStreamingActivity # type: ignore from ._models import HdfsLinkedService # type: ignore + from ._models import HdfsLocation # type: ignore + from ._models import HdfsReadSettings # type: ignore + from ._models import HdfsSource # type: ignore from ._models import HiveLinkedService # type: ignore from ._models import HiveObjectDataset # type: ignore + from ._models import HiveSource # type: ignore from ._models import HttpLinkedService # type: ignore + from ._models import HttpReadSettings # type: ignore + from ._models import HttpServerLocation # type: ignore + from ._models import HttpSource # type: ignore from ._models import HubspotLinkedService # type: ignore from ._models import HubspotObjectDataset # type: ignore + from ._models import HubspotSource # type: ignore from ._models import IfConditionActivity # type: ignore from ._models import ImpalaLinkedService # type: ignore from ._models import ImpalaObjectDataset # type: ignore + from ._models import ImpalaSource # type: ignore from ._models import InformixLinkedService # type: ignore + from ._models import InformixSink # type: ignore + from ._models import InformixSource # type: ignore from ._models import InformixTableDataset # type: ignore + from ._models import IntegrationRuntime # type: ignore + from ._models import IntegrationRuntimeComputeProperties # type: ignore + from ._models import IntegrationRuntimeCustomSetupScriptProperties # type: ignore + from ._models import IntegrationRuntimeDataFlowProperties # type: ignore + from ._models import IntegrationRuntimeDataProxyProperties # type: ignore + from ._models import IntegrationRuntimeListResponse # type: ignore from ._models import 
IntegrationRuntimeReference # type: ignore + from ._models import IntegrationRuntimeResource # type: ignore + from ._models import IntegrationRuntimeSsisCatalogInfo # type: ignore + from ._models import IntegrationRuntimeSsisProperties # type: ignore + from ._models import IntegrationRuntimeVNetProperties # type: ignore from ._models import JiraLinkedService # type: ignore from ._models import JiraObjectDataset # type: ignore + from ._models import JiraSource # type: ignore from ._models import JsonDataset # type: ignore + from ._models import JsonFormat # type: ignore + from ._models import JsonSink # type: ignore + from ._models import JsonSource # type: ignore + from ._models import JsonWriteSettings # type: ignore + from ._models import LibraryRequirements # type: ignore + from ._models import LinkedIntegrationRuntimeKeyAuthorization # type: ignore + from ._models import LinkedIntegrationRuntimeRbacAuthorization # type: ignore + from ._models import LinkedIntegrationRuntimeType # type: ignore from ._models import LinkedService # type: ignore from ._models import LinkedServiceDebugResource # type: ignore from ._models import LinkedServiceListResponse # type: ignore @@ -548,21 +908,34 @@ from ._models import LookupActivity # type: ignore from ._models import MagentoLinkedService # type: ignore from ._models import MagentoObjectDataset # type: ignore + from ._models import MagentoSource # type: ignore + from ._models import ManagedIdentity # type: ignore + from ._models import ManagedIntegrationRuntime # type: ignore from ._models import MappingDataFlow # type: ignore from ._models import MariaDBLinkedService # type: ignore + from ._models import MariaDBSource # type: ignore from ._models import MariaDBTableDataset # type: ignore from ._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore + from ._models import MarketoSource # type: ignore from ._models import MicrosoftAccessLinkedService # type: ignore + from ._models import MicrosoftAccessSink # type: ignore + from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore from ._models import MongoDbCollectionDataset # type: ignore + from ._models import MongoDbCursorMethodsProperties # type: ignore from ._models import MongoDbLinkedService # type: ignore + from ._models import MongoDbSource # type: ignore from ._models import MongoDbV2CollectionDataset # type: ignore from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore from ._models import MySqlLinkedService # type: ignore + from ._models import MySqlSource # type: ignore from ._models import MySqlTableDataset # type: ignore from ._models import NetezzaLinkedService # type: ignore + from ._models import NetezzaPartitionSettings # type: ignore + from ._models import NetezzaSource # type: ignore from ._models import NetezzaTableDataset # type: ignore from ._models import Notebook # type: ignore from ._models import NotebookCell # type: ignore @@ -575,21 +948,37 @@ from ._models import NotebookSessionProperties # type: ignore from ._models import ODataLinkedService # type: ignore from ._models import ODataResourceDataset # type: ignore + from ._models import ODataSource # type: ignore from ._models import OdbcLinkedService # type: ignore + from ._models import OdbcSink # type: ignore + from ._models import OdbcSource # type: ignore from ._models import 
OdbcTableDataset # type: ignore from ._models import Office365Dataset # type: ignore from ._models import Office365LinkedService # type: ignore + from ._models import Office365Source # type: ignore from ._models import OracleLinkedService # type: ignore + from ._models import OraclePartitionSettings # type: ignore from ._models import OracleServiceCloudLinkedService # type: ignore from ._models import OracleServiceCloudObjectDataset # type: ignore + from ._models import OracleServiceCloudSource # type: ignore + from ._models import OracleSink # type: ignore + from ._models import OracleSource # type: ignore from ._models import OracleTableDataset # type: ignore from ._models import OrcDataset # type: ignore + from ._models import OrcFormat # type: ignore + from ._models import OrcSink # type: ignore + from ._models import OrcSource # type: ignore from ._models import ParameterSpecification # type: ignore from ._models import ParquetDataset # type: ignore + from ._models import ParquetFormat # type: ignore + from ._models import ParquetSink # type: ignore + from ._models import ParquetSource # type: ignore from ._models import PaypalLinkedService # type: ignore from ._models import PaypalObjectDataset # type: ignore + from ._models import PaypalSource # type: ignore from ._models import PhoenixLinkedService # type: ignore from ._models import PhoenixObjectDataset # type: ignore + from ._models import PhoenixSource # type: ignore from ._models import PipelineFolder # type: ignore from ._models import PipelineListResponse # type: ignore from ._models import PipelineReference # type: ignore @@ -597,14 +986,26 @@ from ._models import PipelineRun # type: ignore from ._models import PipelineRunInvokedBy # type: ignore from ._models import PipelineRunsQueryResponse # type: ignore + from ._models import PolybaseSettings # type: ignore from ._models import PostgreSqlLinkedService # type: ignore + from ._models import PostgreSqlSource # type: ignore from ._models import PostgreSqlTableDataset # type: ignore from ._models import PrestoLinkedService # type: ignore from ._models import PrestoObjectDataset # type: ignore + from ._models import PrestoSource # type: ignore + from ._models import PrivateEndpoint # type: ignore + from ._models import PrivateEndpointConnection # type: ignore + from ._models import PrivateLinkServiceConnectionState # type: ignore + from ._models import ProxyResource # type: ignore from ._models import QueryDataFlowDebugSessionsResponse # type: ignore from ._models import QuickBooksLinkedService # type: ignore from ._models import QuickBooksObjectDataset # type: ignore + from ._models import QuickBooksSource # type: ignore + from ._models import RecurrenceSchedule # type: ignore + from ._models import RecurrenceScheduleOccurrence # type: ignore from ._models import RedirectIncompatibleRowSettings # type: ignore + from ._models import RedshiftUnloadSettings # type: ignore + from ._models import RelationalSource # type: ignore from ._models import RelationalTableDataset # type: ignore from ._models import RerunTriggerListResponse # type: ignore from ._models import RerunTriggerResource # type: ignore @@ -613,8 +1014,11 @@ from ._models import Resource # type: ignore from ._models import ResponsysLinkedService # type: ignore from ._models import ResponsysObjectDataset # type: ignore + from ._models import ResponsysSource # type: ignore from ._models import RestResourceDataset # type: ignore from ._models import RestServiceLinkedService # type: ignore + from ._models import 
RestSource # type: ignore + from ._models import RetryPolicy # type: ignore from ._models import RunFilterParameters # type: ignore from ._models import RunQueryFilter # type: ignore from ._models import RunQueryOrderBy # type: ignore @@ -628,30 +1032,54 @@ from ._models import SalesforceLinkedService # type: ignore from ._models import SalesforceMarketingCloudLinkedService # type: ignore from ._models import SalesforceMarketingCloudObjectDataset # type: ignore + from ._models import SalesforceMarketingCloudSource # type: ignore from ._models import SalesforceObjectDataset # type: ignore from ._models import SalesforceServiceCloudLinkedService # type: ignore from ._models import SalesforceServiceCloudObjectDataset # type: ignore + from ._models import SalesforceServiceCloudSink # type: ignore + from ._models import SalesforceServiceCloudSource # type: ignore + from ._models import SalesforceSink # type: ignore + from ._models import SalesforceSource # type: ignore from ._models import SapBWLinkedService # type: ignore from ._models import SapBwCubeDataset # type: ignore + from ._models import SapBwSource # type: ignore from ._models import SapCloudForCustomerLinkedService # type: ignore from ._models import SapCloudForCustomerResourceDataset # type: ignore + from ._models import SapCloudForCustomerSink # type: ignore + from ._models import SapCloudForCustomerSource # type: ignore from ._models import SapEccLinkedService # type: ignore from ._models import SapEccResourceDataset # type: ignore + from ._models import SapEccSource # type: ignore from ._models import SapHanaLinkedService # type: ignore + from ._models import SapHanaPartitionSettings # type: ignore + from ._models import SapHanaSource # type: ignore from ._models import SapHanaTableDataset # type: ignore from ._models import SapOpenHubLinkedService # type: ignore + from ._models import SapOpenHubSource # type: ignore from ._models import SapOpenHubTableDataset # type: ignore from ._models import SapTableLinkedService # type: ignore + from ._models import SapTablePartitionSettings # type: ignore from ._models import SapTableResourceDataset # type: ignore + from ._models import SapTableSource # type: ignore + from ._models import ScheduleTrigger # type: ignore + from ._models import ScheduleTriggerRecurrence # type: ignore from ._models import ScriptAction # type: ignore from ._models import SecretBase # type: ignore from ._models import SecureString # type: ignore + from ._models import SelfDependencyTumblingWindowTriggerReference # type: ignore + from ._models import SelfHostedIntegrationRuntime # type: ignore from ._models import ServiceNowLinkedService # type: ignore from ._models import ServiceNowObjectDataset # type: ignore + from ._models import ServiceNowSource # type: ignore from ._models import SetVariableActivity # type: ignore + from ._models import SftpLocation # type: ignore + from ._models import SftpReadSettings # type: ignore from ._models import SftpServerLinkedService # type: ignore + from ._models import SftpWriteSettings # type: ignore from ._models import ShopifyLinkedService # type: ignore from ._models import ShopifyObjectDataset # type: ignore + from ._models import ShopifySource # type: ignore + from ._models import Sku # type: ignore from ._models import SparkBatchJob # type: ignore from ._models import SparkBatchJobState # type: ignore from ._models import SparkJobDefinition # type: ignore @@ -664,7 +1092,14 @@ from ._models import SparkScheduler # type: ignore from ._models import SparkServiceError # 
type: ignore from ._models import SparkServicePlugin # type: ignore + from ._models import SparkSource # type: ignore from ._models import SqlConnection # type: ignore + from ._models import SqlDWSink # type: ignore + from ._models import SqlDWSource # type: ignore + from ._models import SqlMISink # type: ignore + from ._models import SqlMISource # type: ignore + from ._models import SqlPool # type: ignore + from ._models import SqlPoolInfoListResult # type: ignore from ._models import SqlPoolReference # type: ignore from ._models import SqlPoolStoredProcedureActivity # type: ignore from ._models import SqlScript # type: ignore @@ -673,42 +1108,62 @@ from ._models import SqlScriptResource # type: ignore from ._models import SqlScriptsListResponse # type: ignore from ._models import SqlServerLinkedService # type: ignore + from ._models import SqlServerSink # type: ignore + from ._models import SqlServerSource # type: ignore from ._models import SqlServerStoredProcedureActivity # type: ignore from ._models import SqlServerTableDataset # type: ignore + from ._models import SqlSink # type: ignore + from ._models import SqlSource # type: ignore from ._models import SquareLinkedService # type: ignore from ._models import SquareObjectDataset # type: ignore + from ._models import SquareSource # type: ignore from ._models import SsisObjectMetadataStatusResponse # type: ignore from ._models import StagingSettings # type: ignore from ._models import StartDataFlowDebugSessionRequest # type: ignore from ._models import StartDataFlowDebugSessionResponse # type: ignore + from ._models import StoreReadSettings # type: ignore + from ._models import StoreWriteSettings # type: ignore from ._models import StoredProcedureParameter # type: ignore from ._models import SubResource # type: ignore from ._models import SubResourceDebugResource # type: ignore from ._models import SwitchActivity # type: ignore from ._models import SwitchCase # type: ignore from ._models import SybaseLinkedService # type: ignore + from ._models import SybaseSource # type: ignore from ._models import SybaseTableDataset # type: ignore from ._models import SynapseNotebookActivity # type: ignore from ._models import SynapseNotebookReference # type: ignore from ._models import SynapseSparkJobDefinitionActivity # type: ignore from ._models import SynapseSparkJobReference # type: ignore + from ._models import TabularSource # type: ignore + from ._models import TabularTranslator # type: ignore from ._models import TeradataLinkedService # type: ignore + from ._models import TeradataPartitionSettings # type: ignore + from ._models import TeradataSource # type: ignore from ._models import TeradataTableDataset # type: ignore + from ._models import TextFormat # type: ignore + from ._models import TrackedResource # type: ignore from ._models import Transformation # type: ignore from ._models import Trigger # type: ignore from ._models import TriggerDependencyProvisioningStatus # type: ignore + from ._models import TriggerDependencyReference # type: ignore from ._models import TriggerListResponse # type: ignore from ._models import TriggerPipelineReference # type: ignore + from ._models import TriggerReference # type: ignore from ._models import TriggerResource # type: ignore from ._models import TriggerRun # type: ignore from ._models import TriggerRunsQueryResponse # type: ignore from ._models import TriggerSubscriptionOperationStatus # type: ignore + from ._models import TumblingWindowTrigger # type: ignore + from ._models import 
TumblingWindowTriggerDependencyReference # type: ignore from ._models import UntilActivity # type: ignore from ._models import UserProperty # type: ignore from ._models import ValidationActivity # type: ignore from ._models import VariableSpecification # type: ignore from ._models import VerticaLinkedService # type: ignore + from ._models import VerticaSource # type: ignore from ._models import VerticaTableDataset # type: ignore + from ._models import VirtualNetworkProfile # type: ignore from ._models import WaitActivity # type: ignore from ._models import WebActivity # type: ignore from ._models import WebActivityAuthentication # type: ignore @@ -718,30 +1173,39 @@ from ._models import WebHookActivity # type: ignore from ._models import WebLinkedService # type: ignore from ._models import WebLinkedServiceTypeProperties # type: ignore + from ._models import WebSource # type: ignore from ._models import WebTableDataset # type: ignore from ._models import Workspace # type: ignore from ._models import WorkspaceIdentity # type: ignore from ._models import WorkspaceUpdateParameters # type: ignore from ._models import XeroLinkedService # type: ignore from ._models import XeroObjectDataset # type: ignore + from ._models import XeroSource # type: ignore from ._models import ZohoLinkedService # type: ignore from ._models import ZohoObjectDataset # type: ignore + from ._models import ZohoSource # type: ignore from ._artifacts_client_enums import ( AvroCompressionCodec, AzureFunctionActivityMethod, + AzureSearchIndexWriteBehaviorType, BigDataPoolReferenceType, + BlobEventTypes, + CassandraSourceReadConsistencyLevels, CellOutputType, + CopyBehaviorType, DataFlowComputeType, DataFlowReferenceType, DatasetCompressionLevel, DatasetReferenceType, + DayOfWeek, Db2AuthenticationType, DelimitedTextCompressionCodec, DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, DynamicsServicePrincipalCredentialType, + DynamicsSinkWriteBehavior, EventSubscriptionStatus, ExpressionType, FtpAuthenticationType, @@ -755,24 +1219,45 @@ HiveThriftTransportProtocol, HttpAuthenticationType, ImpalaAuthenticationType, + IntegrationRuntimeEdition, + IntegrationRuntimeEntityReferenceType, + IntegrationRuntimeLicenseType, IntegrationRuntimeReferenceType, + IntegrationRuntimeSsisCatalogPricingTier, + IntegrationRuntimeState, + IntegrationRuntimeType, + JsonFormatFilePattern, + JsonWriteFilePattern, MongoDbAuthenticationType, + NetezzaPartitionOption, + NodeSize, + NodeSizeFamily, NotebookReferenceType, ODataAadServicePrincipalCredentialType, ODataAuthenticationType, + OraclePartitionOption, OrcCompressionCodec, ParameterType, ParquetCompressionCodec, PhoenixAuthenticationType, PipelineReferenceType, PluginCurrentState, + PolybaseSettingsRejectType, PrestoAuthenticationType, + PrivateLinkServiceConnectionStateStatus, + RecurrenceFrequency, + ResourceIdentityType, RestServiceAuthenticationType, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrder, RunQueryOrderByField, + SalesforceSinkWriteBehavior, + SalesforceSourceReadBehavior, + SapCloudForCustomerSinkWriteBehavior, SapHanaAuthenticationType, + SapHanaPartitionOption, + SapTablePartitionOption, SchedulerCurrentState, ServiceNowAuthenticationType, SftpAuthenticationType, @@ -791,8 +1276,11 @@ StoredProcedureParameterType, SybaseAuthenticationType, TeradataAuthenticationType, + TeradataPartitionOption, + TriggerReferenceType, TriggerRunStatus, TriggerRuntimeState, + TumblingWindowFrequency, Type, VariableType, WebActivityMethod, @@ -809,21 +1297,49 @@ 
'AddDataFlowToDebugSessionResponse', 'AmazonMWSLinkedService', 'AmazonMWSObjectDataset', + 'AmazonMWSSource', 'AmazonRedshiftLinkedService', + 'AmazonRedshiftSource', 'AmazonRedshiftTableDataset', 'AmazonS3LinkedService', + 'AmazonS3Location', + 'AmazonS3ReadSettings', 'AppendVariableActivity', + 'AutoPauseProperties', + 'AutoScaleProperties', 'AvroDataset', + 'AvroFormat', + 'AvroSink', + 'AvroSource', + 'AvroWriteSettings', 'AzureBatchLinkedService', 'AzureBlobFSLinkedService', + 'AzureBlobFSLocation', + 'AzureBlobFSReadSettings', + 'AzureBlobFSSink', + 'AzureBlobFSSource', + 'AzureBlobFSWriteSettings', 'AzureBlobStorageLinkedService', + 'AzureBlobStorageLocation', + 'AzureBlobStorageReadSettings', + 'AzureBlobStorageWriteSettings', 'AzureDataExplorerCommandActivity', 'AzureDataExplorerLinkedService', + 'AzureDataExplorerSink', + 'AzureDataExplorerSource', 'AzureDataExplorerTableDataset', 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStoreLinkedService', + 'AzureDataLakeStoreLocation', + 'AzureDataLakeStoreReadSettings', + 'AzureDataLakeStoreSink', + 'AzureDataLakeStoreSource', + 'AzureDataLakeStoreWriteSettings', 'AzureDatabricksLinkedService', + 'AzureEntityResource', 'AzureFileStorageLinkedService', + 'AzureFileStorageLocation', + 'AzureFileStorageReadSettings', 'AzureFunctionActivity', 'AzureFunctionLinkedService', 'AzureKeyVaultLinkedService', @@ -835,40 +1351,70 @@ 'AzureMLUpdateResourceActivity', 'AzureMLWebServiceFile', 'AzureMariaDBLinkedService', + 'AzureMariaDBSource', 'AzureMariaDBTableDataset', 'AzureMySqlLinkedService', + 'AzureMySqlSink', + 'AzureMySqlSource', 'AzureMySqlTableDataset', 'AzurePostgreSqlLinkedService', + 'AzurePostgreSqlSink', + 'AzurePostgreSqlSource', 'AzurePostgreSqlTableDataset', + 'AzureQueueSink', 'AzureSearchIndexDataset', + 'AzureSearchIndexSink', 'AzureSearchLinkedService', 'AzureSqlDWLinkedService', 'AzureSqlDWTableDataset', 'AzureSqlDatabaseLinkedService', 'AzureSqlMILinkedService', 'AzureSqlMITableDataset', + 'AzureSqlSink', + 'AzureSqlSource', 'AzureSqlTableDataset', 'AzureStorageLinkedService', 'AzureTableDataset', + 'AzureTableSink', + 'AzureTableSource', 'AzureTableStorageLinkedService', 'BigDataPoolReference', + 'BigDataPoolResourceInfo', + 'BigDataPoolResourceInfoListResult', 'BinaryDataset', + 'BinarySink', + 'BinarySource', + 'BlobEventsTrigger', + 'BlobSink', + 'BlobSource', + 'BlobTrigger', 'CassandraLinkedService', + 'CassandraSource', 'CassandraTableDataset', + 'ChainingTrigger', 'CloudError', 'CommonDataServiceForAppsEntityDataset', 'CommonDataServiceForAppsLinkedService', + 'CommonDataServiceForAppsSink', + 'CommonDataServiceForAppsSource', 'ConcurLinkedService', 'ConcurObjectDataset', + 'ConcurSource', 'ControlActivity', 'CopyActivity', 'CopySink', 'CopySource', + 'CopyTranslator', 'CosmosDbLinkedService', 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbMongoDbApiLinkedService', + 'CosmosDbMongoDbApiSink', + 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiCollectionDataset', + 'CosmosDbSqlApiSink', + 'CosmosDbSqlApiSource', 'CouchbaseLinkedService', + 'CouchbaseSource', 'CouchbaseTableDataset', 'CreateDataFlowDebugSessionRequest', 'CreateDataFlowDebugSessionResponse', @@ -876,6 +1422,10 @@ 'CustomActivity', 'CustomActivityReferenceObject', 'CustomDataSourceLinkedService', + 'CustomDataset', + 'CustomSetupBase', + 'DWCopyCommandDefaultValue', + 'DWCopyCommandSettings', 'DataFlow', 'DataFlowDebugCommandRequest', 'DataFlowDebugCommandResponse', @@ -896,12 +1446,14 @@ 'DataFlowSourceSetting', 'DataFlowStagingInfo', 
'DataLakeAnalyticsUSQLActivity', + 'DataLakeStorageAccountDetails', 'DatabricksNotebookActivity', 'DatabricksSparkJarActivity', 'DatabricksSparkPythonActivity', 'Dataset', 'DatasetBZip2Compression', 'DatasetCompression', + 'DatasetDataElement', 'DatasetDebugResource', 'DatasetDeflateCompression', 'DatasetFolder', @@ -910,23 +1462,45 @@ 'DatasetLocation', 'DatasetReference', 'DatasetResource', + 'DatasetSchemaDataElement', + 'DatasetStorageFormat', 'DatasetZipDeflateCompression', 'Db2LinkedService', + 'Db2Source', 'Db2TableDataset', 'DeleteActivity', 'DeleteDataFlowDebugSessionRequest', 'DelimitedTextDataset', + 'DelimitedTextReadSettings', + 'DelimitedTextSink', + 'DelimitedTextSource', + 'DelimitedTextWriteSettings', + 'DependencyReference', + 'DistcpSettings', 'DocumentDbCollectionDataset', + 'DocumentDbCollectionSink', + 'DocumentDbCollectionSource', 'DrillLinkedService', + 'DrillSource', 'DrillTableDataset', 'DynamicsAXLinkedService', 'DynamicsAXResourceDataset', + 'DynamicsAXSource', 'DynamicsCrmEntityDataset', 'DynamicsCrmLinkedService', + 'DynamicsCrmSink', + 'DynamicsCrmSource', 'DynamicsEntityDataset', 'DynamicsLinkedService', + 'DynamicsSink', + 'DynamicsSource', 'EloquaLinkedService', 'EloquaObjectDataset', + 'EloquaSource', + 'EntityReference', + 'ErrorAdditionalInfo', + 'ErrorContract', + 'ErrorResponse', 'EvaluateDataFlowExpressionRequest', 'ExecuteDataFlowActivity', 'ExecuteDataFlowActivityTypePropertiesCompute', @@ -937,20 +1511,35 @@ 'ExposureControlResponse', 'Expression', 'FileServerLinkedService', + 'FileServerLocation', + 'FileServerReadSettings', + 'FileServerWriteSettings', + 'FileSystemSink', + 'FileSystemSource', 'FilterActivity', 'ForEachActivity', + 'FormatReadSettings', + 'FormatWriteSettings', + 'FtpReadSettings', 'FtpServerLinkedService', + 'FtpServerLocation', 'GetMetadataActivity', 'GetSsisObjectMetadataRequest', 'GoogleAdWordsLinkedService', 'GoogleAdWordsObjectDataset', + 'GoogleAdWordsSource', 'GoogleBigQueryLinkedService', 'GoogleBigQueryObjectDataset', + 'GoogleBigQuerySource', 'GoogleCloudStorageLinkedService', + 'GoogleCloudStorageLocation', + 'GoogleCloudStorageReadSettings', 'GreenplumLinkedService', + 'GreenplumSource', 'GreenplumTableDataset', 'HBaseLinkedService', 'HBaseObjectDataset', + 'HBaseSource', 'HDInsightHiveActivity', 'HDInsightLinkedService', 'HDInsightMapReduceActivity', @@ -959,20 +1548,50 @@ 'HDInsightSparkActivity', 'HDInsightStreamingActivity', 'HdfsLinkedService', + 'HdfsLocation', + 'HdfsReadSettings', + 'HdfsSource', 'HiveLinkedService', 'HiveObjectDataset', + 'HiveSource', 'HttpLinkedService', + 'HttpReadSettings', + 'HttpServerLocation', + 'HttpSource', 'HubspotLinkedService', 'HubspotObjectDataset', + 'HubspotSource', 'IfConditionActivity', 'ImpalaLinkedService', 'ImpalaObjectDataset', + 'ImpalaSource', 'InformixLinkedService', + 'InformixSink', + 'InformixSource', 'InformixTableDataset', + 'IntegrationRuntime', + 'IntegrationRuntimeComputeProperties', + 'IntegrationRuntimeCustomSetupScriptProperties', + 'IntegrationRuntimeDataFlowProperties', + 'IntegrationRuntimeDataProxyProperties', + 'IntegrationRuntimeListResponse', 'IntegrationRuntimeReference', + 'IntegrationRuntimeResource', + 'IntegrationRuntimeSsisCatalogInfo', + 'IntegrationRuntimeSsisProperties', + 'IntegrationRuntimeVNetProperties', 'JiraLinkedService', 'JiraObjectDataset', + 'JiraSource', 'JsonDataset', + 'JsonFormat', + 'JsonSink', + 'JsonSource', + 'JsonWriteSettings', + 'LibraryRequirements', + 'LinkedIntegrationRuntimeKeyAuthorization', + 
'LinkedIntegrationRuntimeRbacAuthorization', + 'LinkedIntegrationRuntimeType', 'LinkedService', 'LinkedServiceDebugResource', 'LinkedServiceListResponse', @@ -982,21 +1601,34 @@ 'LookupActivity', 'MagentoLinkedService', 'MagentoObjectDataset', + 'MagentoSource', + 'ManagedIdentity', + 'ManagedIntegrationRuntime', 'MappingDataFlow', 'MariaDBLinkedService', + 'MariaDBSource', 'MariaDBTableDataset', 'MarketoLinkedService', 'MarketoObjectDataset', + 'MarketoSource', 'MicrosoftAccessLinkedService', + 'MicrosoftAccessSink', + 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', 'MongoDbCollectionDataset', + 'MongoDbCursorMethodsProperties', 'MongoDbLinkedService', + 'MongoDbSource', 'MongoDbV2CollectionDataset', 'MongoDbV2LinkedService', + 'MongoDbV2Source', 'MultiplePipelineTrigger', 'MySqlLinkedService', + 'MySqlSource', 'MySqlTableDataset', 'NetezzaLinkedService', + 'NetezzaPartitionSettings', + 'NetezzaSource', 'NetezzaTableDataset', 'Notebook', 'NotebookCell', @@ -1009,21 +1641,37 @@ 'NotebookSessionProperties', 'ODataLinkedService', 'ODataResourceDataset', + 'ODataSource', 'OdbcLinkedService', + 'OdbcSink', + 'OdbcSource', 'OdbcTableDataset', 'Office365Dataset', 'Office365LinkedService', + 'Office365Source', 'OracleLinkedService', + 'OraclePartitionSettings', 'OracleServiceCloudLinkedService', 'OracleServiceCloudObjectDataset', + 'OracleServiceCloudSource', + 'OracleSink', + 'OracleSource', 'OracleTableDataset', 'OrcDataset', + 'OrcFormat', + 'OrcSink', + 'OrcSource', 'ParameterSpecification', 'ParquetDataset', + 'ParquetFormat', + 'ParquetSink', + 'ParquetSource', 'PaypalLinkedService', 'PaypalObjectDataset', + 'PaypalSource', 'PhoenixLinkedService', 'PhoenixObjectDataset', + 'PhoenixSource', 'PipelineFolder', 'PipelineListResponse', 'PipelineReference', @@ -1031,14 +1679,26 @@ 'PipelineRun', 'PipelineRunInvokedBy', 'PipelineRunsQueryResponse', + 'PolybaseSettings', 'PostgreSqlLinkedService', + 'PostgreSqlSource', 'PostgreSqlTableDataset', 'PrestoLinkedService', 'PrestoObjectDataset', + 'PrestoSource', + 'PrivateEndpoint', + 'PrivateEndpointConnection', + 'PrivateLinkServiceConnectionState', + 'ProxyResource', 'QueryDataFlowDebugSessionsResponse', 'QuickBooksLinkedService', 'QuickBooksObjectDataset', + 'QuickBooksSource', + 'RecurrenceSchedule', + 'RecurrenceScheduleOccurrence', 'RedirectIncompatibleRowSettings', + 'RedshiftUnloadSettings', + 'RelationalSource', 'RelationalTableDataset', 'RerunTriggerListResponse', 'RerunTriggerResource', @@ -1047,8 +1707,11 @@ 'Resource', 'ResponsysLinkedService', 'ResponsysObjectDataset', + 'ResponsysSource', 'RestResourceDataset', 'RestServiceLinkedService', + 'RestSource', + 'RetryPolicy', 'RunFilterParameters', 'RunQueryFilter', 'RunQueryOrderBy', @@ -1062,30 +1725,54 @@ 'SalesforceLinkedService', 'SalesforceMarketingCloudLinkedService', 'SalesforceMarketingCloudObjectDataset', + 'SalesforceMarketingCloudSource', 'SalesforceObjectDataset', 'SalesforceServiceCloudLinkedService', 'SalesforceServiceCloudObjectDataset', + 'SalesforceServiceCloudSink', + 'SalesforceServiceCloudSource', + 'SalesforceSink', + 'SalesforceSource', 'SapBWLinkedService', 'SapBwCubeDataset', + 'SapBwSource', 'SapCloudForCustomerLinkedService', 'SapCloudForCustomerResourceDataset', + 'SapCloudForCustomerSink', + 'SapCloudForCustomerSource', 'SapEccLinkedService', 'SapEccResourceDataset', + 'SapEccSource', 'SapHanaLinkedService', + 'SapHanaPartitionSettings', + 'SapHanaSource', 'SapHanaTableDataset', 'SapOpenHubLinkedService', + 'SapOpenHubSource', 'SapOpenHubTableDataset', 
'SapTableLinkedService', + 'SapTablePartitionSettings', 'SapTableResourceDataset', + 'SapTableSource', + 'ScheduleTrigger', + 'ScheduleTriggerRecurrence', 'ScriptAction', 'SecretBase', 'SecureString', + 'SelfDependencyTumblingWindowTriggerReference', + 'SelfHostedIntegrationRuntime', 'ServiceNowLinkedService', 'ServiceNowObjectDataset', + 'ServiceNowSource', 'SetVariableActivity', + 'SftpLocation', + 'SftpReadSettings', 'SftpServerLinkedService', + 'SftpWriteSettings', 'ShopifyLinkedService', 'ShopifyObjectDataset', + 'ShopifySource', + 'Sku', 'SparkBatchJob', 'SparkBatchJobState', 'SparkJobDefinition', @@ -1098,7 +1785,14 @@ 'SparkScheduler', 'SparkServiceError', 'SparkServicePlugin', + 'SparkSource', 'SqlConnection', + 'SqlDWSink', + 'SqlDWSource', + 'SqlMISink', + 'SqlMISource', + 'SqlPool', + 'SqlPoolInfoListResult', 'SqlPoolReference', 'SqlPoolStoredProcedureActivity', 'SqlScript', @@ -1107,42 +1801,62 @@ 'SqlScriptResource', 'SqlScriptsListResponse', 'SqlServerLinkedService', + 'SqlServerSink', + 'SqlServerSource', 'SqlServerStoredProcedureActivity', 'SqlServerTableDataset', + 'SqlSink', + 'SqlSource', 'SquareLinkedService', 'SquareObjectDataset', + 'SquareSource', 'SsisObjectMetadataStatusResponse', 'StagingSettings', 'StartDataFlowDebugSessionRequest', 'StartDataFlowDebugSessionResponse', + 'StoreReadSettings', + 'StoreWriteSettings', 'StoredProcedureParameter', 'SubResource', 'SubResourceDebugResource', 'SwitchActivity', 'SwitchCase', 'SybaseLinkedService', + 'SybaseSource', 'SybaseTableDataset', 'SynapseNotebookActivity', 'SynapseNotebookReference', 'SynapseSparkJobDefinitionActivity', 'SynapseSparkJobReference', + 'TabularSource', + 'TabularTranslator', 'TeradataLinkedService', + 'TeradataPartitionSettings', + 'TeradataSource', 'TeradataTableDataset', + 'TextFormat', + 'TrackedResource', 'Transformation', 'Trigger', 'TriggerDependencyProvisioningStatus', + 'TriggerDependencyReference', 'TriggerListResponse', 'TriggerPipelineReference', + 'TriggerReference', 'TriggerResource', 'TriggerRun', 'TriggerRunsQueryResponse', 'TriggerSubscriptionOperationStatus', + 'TumblingWindowTrigger', + 'TumblingWindowTriggerDependencyReference', 'UntilActivity', 'UserProperty', 'ValidationActivity', 'VariableSpecification', 'VerticaLinkedService', + 'VerticaSource', 'VerticaTableDataset', + 'VirtualNetworkProfile', 'WaitActivity', 'WebActivity', 'WebActivityAuthentication', @@ -1152,28 +1866,37 @@ 'WebHookActivity', 'WebLinkedService', 'WebLinkedServiceTypeProperties', + 'WebSource', 'WebTableDataset', 'Workspace', 'WorkspaceIdentity', 'WorkspaceUpdateParameters', 'XeroLinkedService', 'XeroObjectDataset', + 'XeroSource', 'ZohoLinkedService', 'ZohoObjectDataset', + 'ZohoSource', 'AvroCompressionCodec', 'AzureFunctionActivityMethod', + 'AzureSearchIndexWriteBehaviorType', 'BigDataPoolReferenceType', + 'BlobEventTypes', + 'CassandraSourceReadConsistencyLevels', 'CellOutputType', + 'CopyBehaviorType', 'DataFlowComputeType', 'DataFlowReferenceType', 'DatasetCompressionLevel', 'DatasetReferenceType', + 'DayOfWeek', 'Db2AuthenticationType', 'DelimitedTextCompressionCodec', 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', 'DynamicsServicePrincipalCredentialType', + 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', 'ExpressionType', 'FtpAuthenticationType', @@ -1187,24 +1910,45 @@ 'HiveThriftTransportProtocol', 'HttpAuthenticationType', 'ImpalaAuthenticationType', + 'IntegrationRuntimeEdition', + 'IntegrationRuntimeEntityReferenceType', + 'IntegrationRuntimeLicenseType', 
'IntegrationRuntimeReferenceType', + 'IntegrationRuntimeSsisCatalogPricingTier', + 'IntegrationRuntimeState', + 'IntegrationRuntimeType', + 'JsonFormatFilePattern', + 'JsonWriteFilePattern', 'MongoDbAuthenticationType', + 'NetezzaPartitionOption', + 'NodeSize', + 'NodeSizeFamily', 'NotebookReferenceType', 'ODataAadServicePrincipalCredentialType', 'ODataAuthenticationType', + 'OraclePartitionOption', 'OrcCompressionCodec', 'ParameterType', 'ParquetCompressionCodec', 'PhoenixAuthenticationType', 'PipelineReferenceType', 'PluginCurrentState', + 'PolybaseSettingsRejectType', 'PrestoAuthenticationType', + 'PrivateLinkServiceConnectionStateStatus', + 'RecurrenceFrequency', + 'ResourceIdentityType', 'RestServiceAuthenticationType', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrder', 'RunQueryOrderByField', + 'SalesforceSinkWriteBehavior', + 'SalesforceSourceReadBehavior', + 'SapCloudForCustomerSinkWriteBehavior', 'SapHanaAuthenticationType', + 'SapHanaPartitionOption', + 'SapTablePartitionOption', 'SchedulerCurrentState', 'ServiceNowAuthenticationType', 'SftpAuthenticationType', @@ -1223,8 +1967,11 @@ 'StoredProcedureParameterType', 'SybaseAuthenticationType', 'TeradataAuthenticationType', + 'TeradataPartitionOption', + 'TriggerReferenceType', 'TriggerRunStatus', 'TriggerRuntimeState', + 'TumblingWindowFrequency', 'Type', 'VariableType', 'WebActivityMethod', diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py index 56dc9d2d4f7b..11cc7a225ebd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py @@ -46,12 +46,42 @@ class AzureFunctionActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, HEAD = "HEAD" TRACE = "TRACE" +class AzureSearchIndexWriteBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specify the write behavior when upserting documents into Azure Search Index. + """ + + MERGE = "Merge" + UPLOAD = "Upload" + class BigDataPoolReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Big data pool reference type. """ BIG_DATA_POOL_REFERENCE = "BigDataPoolReference" +class BlobEventTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + MICROSOFT_STORAGE_BLOB_CREATED = "Microsoft.Storage.BlobCreated" + MICROSOFT_STORAGE_BLOB_DELETED = "Microsoft.Storage.BlobDeleted" + +class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The consistency level specifies how many Cassandra servers must respond to a read request + before returning data to the client application. Cassandra checks the specified number of + Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + """ + + ALL = "ALL" + EACH_QUORUM = "EACH_QUORUM" + QUORUM = "QUORUM" + LOCAL_QUORUM = "LOCAL_QUORUM" + ONE = "ONE" + TWO = "TWO" + THREE = "THREE" + LOCAL_ONE = "LOCAL_ONE" + SERIAL = "SERIAL" + LOCAL_SERIAL = "LOCAL_SERIAL" + class CellOutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Execution, display, or stream outputs. 
""" @@ -61,6 +91,14 @@ class CellOutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STREAM = "stream" ERROR = "error" +class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available types of copy behavior. + """ + + PRESERVE_HIERARCHY = "PreserveHierarchy" + FLATTEN_HIERARCHY = "FlattenHierarchy" + MERGE_FILES = "MergeFiles" + class DataFlowComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Compute type of the cluster which will execute data flow job. """ @@ -88,6 +126,16 @@ class DatasetReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DATASET_REFERENCE = "DatasetReference" +class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + SUNDAY = "Sunday" + MONDAY = "Monday" + TUESDAY = "Tuesday" + WEDNESDAY = "Wednesday" + THURSDAY = "Thursday" + FRIDAY = "Friday" + SATURDAY = "Saturday" + class Db2AuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ @@ -138,6 +186,12 @@ class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnum SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" +class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The write behavior for the operation. + """ + + UPSERT = "Upsert" + class EventSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Event Subscription Status. """ @@ -243,12 +297,80 @@ class ImpalaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu SASL_USERNAME = "SASLUsername" USERNAME_AND_PASSWORD = "UsernameAndPassword" +class IntegrationRuntimeEdition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The edition for the SSIS Integration Runtime + """ + + STANDARD = "Standard" + ENTERPRISE = "Enterprise" + +class IntegrationRuntimeEntityReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of this referenced entity. + """ + + INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" + LINKED_SERVICE_REFERENCE = "LinkedServiceReference" + +class IntegrationRuntimeLicenseType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """License type for bringing your own license scenario. + """ + + BASE_PRICE = "BasePrice" + LICENSE_INCLUDED = "LicenseIncluded" + class IntegrationRuntimeReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of integration runtime. """ INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" +class IntegrationRuntimeSsisCatalogPricingTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The pricing tier for the catalog database. The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/ + """ + + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + PREMIUM_RS = "PremiumRS" + +class IntegrationRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The state of integration runtime. + """ + + INITIAL = "Initial" + STOPPED = "Stopped" + STARTED = "Started" + STARTING = "Starting" + STOPPING = "Stopping" + NEED_REGISTRATION = "NeedRegistration" + ONLINE = "Online" + LIMITED = "Limited" + OFFLINE = "Offline" + ACCESS_DENIED = "AccessDenied" + +class IntegrationRuntimeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of integration runtime. 
+ """ + + MANAGED = "Managed" + SELF_HOSTED = "SelfHosted" + +class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """JSON format file pattern. A property of JsonFormat. + """ + + SET_OF_OBJECTS = "setOfObjects" + ARRAY_OF_OBJECTS = "arrayOfObjects" + +class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """File pattern of JSON. This setting controls the way a collection of JSON objects will be + treated. The default value is 'setOfObjects'. It is case-sensitive. + """ + + SET_OF_OBJECTS = "setOfObjects" + ARRAY_OF_OBJECTS = "arrayOfObjects" + class MongoDbAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the MongoDB database. """ @@ -256,6 +378,32 @@ class MongoDbAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, En BASIC = "Basic" ANONYMOUS = "Anonymous" +class NetezzaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for Netezza read in parallel. + """ + + NONE = "None" + DATA_SLICE = "DataSlice" + DYNAMIC_RANGE = "DynamicRange" + +class NodeSize(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The level of compute power that each node in the Big Data pool has. + """ + + NONE = "None" + SMALL = "Small" + MEDIUM = "Medium" + LARGE = "Large" + X_LARGE = "XLarge" + XX_LARGE = "XXLarge" + +class NodeSizeFamily(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The kind of nodes that the Big Data pool provides. + """ + + NONE = "None" + MEMORY_OPTIMIZED = "MemoryOptimized" + class NotebookReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Synapse notebook reference type. """ @@ -279,6 +427,14 @@ class ODataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" +class OraclePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for Oracle read in parallel. + """ + + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + DYNAMIC_RANGE = "DynamicRange" + class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" @@ -328,6 +484,13 @@ class PluginCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CLEANUP = "Cleanup" ENDED = "Ended" +class PolybaseSettingsRejectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates whether the RejectValue property is specified as a literal value or a percentage. + """ + + VALUE = "value" + PERCENTAGE = "percentage" + class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Presto server. """ @@ -335,6 +498,34 @@ class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu ANONYMOUS = "Anonymous" LDAP = "LDAP" +class PrivateLinkServiceConnectionStateStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The private link service connection status. + """ + + APPROVED = "Approved" + PENDING = "Pending" + REJECTED = "Rejected" + DISCONNECTED = "Disconnected" + +class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enumerates possible frequency option for the schedule trigger. 
+ """ + + NOT_SPECIFIED = "NotSpecified" + MINUTE = "Minute" + HOUR = "Hour" + DAY = "Day" + WEEK = "Week" + MONTH = "Month" + YEAR = "Year" + +class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of managed identity for the workspace + """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + class RestServiceAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the REST service. """ @@ -397,6 +588,27 @@ class RunQueryOrderByField(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): TRIGGER_NAME = "TriggerName" TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" +class SalesforceSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The write behavior for the operation. Default is Insert. + """ + + INSERT = "Insert" + UPSERT = "Upsert" + +class SalesforceSourceReadBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The read behavior for the operation. Default is Query. + """ + + QUERY = "Query" + QUERY_ALL = "QueryAll" + +class SapCloudForCustomerSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The write behavior for the operation. Default is 'Insert'. + """ + + INSERT = "Insert" + UPDATE = "Update" + class SapHanaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the SAP HANA server. """ @@ -404,6 +616,25 @@ class SapHanaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, En BASIC = "Basic" WINDOWS = "Windows" +class SapHanaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for SAP HANA read in parallel. + """ + + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + SAP_HANA_DYNAMIC_RANGE = "SapHanaDynamicRange" + +class SapTablePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for SAP table read in parallel. + """ + + NONE = "None" + PARTITION_ON_INT = "PartitionOnInt" + PARTITION_ON_CALENDAR_YEAR = "PartitionOnCalendarYear" + PARTITION_ON_CALENDAR_MONTH = "PartitionOnCalendarMonth" + PARTITION_ON_CALENDAR_DATE = "PartitionOnCalendarDate" + PARTITION_ON_TIME = "PartitionOnTime" + class SchedulerCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): QUEUED = "Queued" @@ -537,6 +768,20 @@ class TeradataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E BASIC = "Basic" WINDOWS = "Windows" +class TeradataPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for teradata read in parallel. + """ + + NONE = "None" + HASH = "Hash" + DYNAMIC_RANGE = "DynamicRange" + +class TriggerReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Trigger reference type. + """ + + TRIGGER_REFERENCE = "TriggerReference" + class TriggerRunStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Trigger run status. """ @@ -553,6 +798,13 @@ class TriggerRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STOPPED = "Stopped" DISABLED = "Disabled" +class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enumerates possible frequency option for the tumbling window trigger. + """ + + MINUTE = "Minute" + HOUR = "Hour" + class Type(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Linked service reference type. 
""" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py index 6bb2ecb9828a..a9b3664d40af 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py @@ -439,7 +439,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
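The docstring hunk above and the _subtype_map hunk just below are two halves of the same discriminated-union mechanism, and the new CopySource/TabularSource hierarchy added later in this file follows the identical pattern: callers construct a concrete model and the model fills in its own 'type' discriminator, while deserialization reads the payload's 'type' value and resolves it through _subtype_map back to the matching subclass. A minimal sketch of that round trip, assuming the regenerated azure.synapse.artifacts.models namespace from this patch and msrest's standard Model.deserialize helper; the query text and timeout are placeholder values, not part of the patch:

    from azure.synapse.artifacts import models

    # Construct one of the copy sources added in this patch; the model sets its own
    # 'type' discriminator ('AmazonMWSSource'), so callers never assign it by hand.
    source = models.AmazonMWSSource(
        query="SELECT * FROM Orders",   # placeholder query
        query_timeout="02:00:00",       # matches the documented HH:MM:SS pattern
    )

    # Deserialization goes the other way: the base class looks the payload's 'type'
    # value up in the (flattened) _subtype_map and returns the matching subclass.
    payload = {"type": "AmazonMWSSource", "query": "SELECT * FROM Orders"}
    roundtripped = models.CopySource.deserialize(payload)
    assert isinstance(roundtripped, models.AmazonMWSSource)
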
@@ -485,7 +485,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -562,6 +562,161 @@ def __init__( self.table_name = kwargs.get('table_name', None) +class CopySource(msrest.serialization.Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, TabularSource, WebSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource'} + } + + def __init__( + self, + **kwargs + ): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'CopySource' # type: 
str + self.source_retry_count = kwargs.get('source_retry_count', None) + self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + + +class TabularSource(CopySource): + """Copy activity sources of tabular type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonMWSSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAXSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMISource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonMWSSource': 'AmazonMWSSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAXSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + } + + def __init__( + self, + **kwargs + ): + super(TabularSource, self).__init__(**kwargs) + self.type = 'TabularSource' # type: str + self.query_timeout = kwargs.get('query_timeout', None) + + +class AmazonMWSSource(TabularSource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonMWSSource, self).__init__(**kwargs) + self.type = 'AmazonMWSSource' # type: str + self.query = kwargs.get('query', None) + + class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. @@ -635,6 +790,61 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class AmazonRedshiftSource(TabularSource): + """A copy activity source for Amazon Redshift Source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be + unloaded into S3 first and then copied into the targeted sink from the interim S3. 
+ :type redshift_unload_settings: ~azure.synapse.artifacts.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonRedshiftSource, self).__init__(**kwargs) + self.type = 'AmazonRedshiftSource' # type: str + self.query = kwargs.get('query', None) + self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) + + class AmazonRedshiftTableDataset(Dataset): """The Amazon Redshift table dataset. @@ -767,730 +977,698 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AppendVariableActivity(Activity): - """Append value for a Variable of type Array. +class DatasetLocation(msrest.serialization.Model): + """Dataset location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3Location, AzureBlobFSLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, SftpLocation. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression. - :type value: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
+ :type file_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation'} } def __init__( self, **kwargs ): - super(AppendVariableActivity, self).__init__(**kwargs) - self.type = 'AppendVariable' # type: str - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'DatasetLocation' # type: str + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) -class AvroDataset(Dataset): - """Avro dataset. +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the avro storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". 
- :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + resultType string). + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or Expression with resultType + string). + :type version: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AvroDataset, self).__init__(**kwargs) - self.type = 'Avro' # type: str - self.location = kwargs.get('location', None) - self.avro_compression_codec = kwargs.get('avro_compression_codec', None) - self.avro_compression_level = kwargs.get('avro_compression_level', None) + super(AmazonS3Location, self).__init__(**kwargs) + self.type = 'AmazonS3Location' # type: str + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. +class StoreReadSettings(msrest.serialization.Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3ReadSettings, AzureBlobFSReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, SftpReadSettings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Batch account name. Type: string (or Expression with - resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.synapse.artifacts.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType - string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with - resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings'} } def __init__( self, **kwargs ): - super(AzureBatchLinkedService, self).__init__(**kwargs) - self.type = 'AzureBatch' # type: str - self.account_name = kwargs['account_name'] - self.access_key = kwargs.get('access_key', None) - self.batch_uri = kwargs['batch_uri'] - self.pool_name = kwargs['pool_name'] - self.linked_service_name = 
kwargs['linked_service_name'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'StoreReadSettings' # type: str + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. +class AmazonS3ReadSettings(StoreReadSettings): + """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureBlobFSLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobFS' # type: str - self.url = kwargs['url'] - self.account_key = kwargs.get('account_key', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AmazonS3ReadSettings, self).__init__(**kwargs) + self.type = 'AmazonS3ReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. +class AppendVariableActivity(Activity): + """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. 
:type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri, - serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with - connectionString, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is - mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param variable_name: Name of the variable whose value needs to be appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression. 
+ :type value: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobStorage' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.service_endpoint = kwargs.get('service_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AppendVariableActivity, self).__init__(**kwargs) + self.type = 'AppendVariable' # type: str + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AutoPauseProperties(msrest.serialization.Model): + """Auto-pausing properties of a Big Data pool powered by Apache Spark. + + :param delay_in_minutes: Number of minutes of idle time before the Big Data pool is + automatically paused. + :type delay_in_minutes: int + :param enabled: Whether auto-pausing is enabled for the Big Data pool. + :type enabled: bool + """ + + _attribute_map = { + 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(AutoPauseProperties, self).__init__(**kwargs) + self.delay_in_minutes = kwargs.get('delay_in_minutes', None) + self.enabled = kwargs.get('enabled', None) + + +class AutoScaleProperties(msrest.serialization.Model): + """Auto-scaling properties of a Big Data pool powered by Apache Spark. + + :param min_node_count: The minimum number of nodes the Big Data pool can support. 
+ :type min_node_count: int + :param enabled: Whether automatic scaling is enabled for the Big Data pool. + :type enabled: bool + :param max_node_count: The maximum number of nodes the Big Data pool can support. + :type max_node_count: int + """ + + _attribute_map = { + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AutoScaleProperties, self).__init__(**kwargs) + self.min_node_count = kwargs.get('min_node_count', None) + self.enabled = kwargs.get('enabled', None) + self.max_node_count = kwargs.get('max_node_count', None) + + +class AvroDataset(Dataset): + """Avro dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType - string). 
- :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the avro storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", + "bzip2". 
+ :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } def __init__( self, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricks' # type: str - self.domain = kwargs['domain'] - self.access_token = kwargs['access_token'] - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.instance_pool_id = kwargs.get('instance_pool_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - 
self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AvroDataset, self).__init__(**kwargs) + self.type = 'Avro' # type: str + self.location = kwargs.get('location', None) + self.avro_compression_codec = kwargs.get('avro_compression_codec', None) + self.avro_compression_level = kwargs.get('avro_compression_level', None) -class ExecutionActivity(Activity): - """Base class for all execution activities. +class DatasetStorageFormat(msrest.serialization.Model): + """The format definition of a storage. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. + sub-classes are: AvroFormat, JsonFormat, OrcFormat, ParquetFormat, TextFormat. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'OrcFormat': 'OrcFormat', 'ParquetFormat': 'ParquetFormat', 'TextFormat': 'TextFormat'} } def __init__( self, **kwargs ): - super(ExecutionActivity, self).__init__(**kwargs) - self.type = 'Execution' # type: str - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'DatasetStorageFormat' # type: str + self.serializer = kwargs.get('serializer', None) + self.deserializer = kwargs.get('deserializer', None) -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.type = 'AzureDataExplorerCommand' # type: str - self.command = kwargs['command'] - self.command_timeout = kwargs.get('command_timeout', None) + super(AvroFormat, self).__init__(**kwargs) + self.type = 'AvroFormat' # type: str -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class CopySink(msrest.serialization.Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( self, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataExplorer' # type: str - self.endpoint = kwargs['endpoint'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.database = kwargs['database'] - self.tenant = kwargs['tenant'] + super(CopySink, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'CopySink' # type: str + self.write_batch_size = kwargs.get('write_batch_size', None) + self.write_batch_timeout = kwargs.get('write_batch_timeout', None) + self.sink_retry_count = kwargs.get('sink_retry_count', None) + self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = 
kwargs.get('max_concurrent_connections', None) -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. +class AvroSink(CopySink): + """A copy activity Avro sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table: The table name of the Azure Data Explorer database. Type: string (or Expression - with resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Avro format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.AvroWriteSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.type = 'AzureDataExplorerTable' # type: str - self.table = kwargs.get('table', None) + super(AvroSink, self).__init__(**kwargs) + self.type = 'AvroSink' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(AvroSource, self).__init__(**kwargs) + self.type = 'AvroSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + + +class FormatWriteSettings(msrest.serialization.Model): + """Format write settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + } + + def __init__( + self, + **kwargs + ): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'FormatWriteSettings' # type: str + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param record_name: Top level record name in write result, which is required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AvroWriteSettings, self).__init__(**kwargs) + self.type = 'AvroWriteSettings' # type: str + self.record_name = kwargs.get('record_name', None) + self.record_namespace = kwargs.get('record_namespace', None) + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. All required parameters must be populated in order to send to Azure. @@ -1507,27 +1685,19 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object] - :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or - Expression with resultType string). + :param account_name: Required. The Azure Batch account name. Type: string (or Expression with + resultType string). :type account_name: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Analytics account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group name (if different from - Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.synapse.artifacts.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with resultType string). - :type data_lake_analytics_uri: object + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -1537,7 +1707,9 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): _validation = { 'type': {'required': True}, 'account_name': {'required': True}, - 'tenant': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { @@ -1548,12 +1720,10 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1561,20 +1731,18 @@ def __init__( self, **kwargs ): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeAnalytics' # type: str + super(AzureBatchLinkedService, self).__init__(**kwargs) + self.type = 'AzureBatch' # type: str self.account_name = kwargs['account_name'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs['tenant'] - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) + self.access_key = kwargs.get('access_key', None) + self.batch_uri = kwargs['batch_uri'] + self.pool_name = kwargs['pool_name'] + self.linked_service_name = kwargs['linked_service_name'] self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. All required parameters must be populated in order to send to Azure. @@ -1591,27 +1759,21 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression - with resultType string). - :type data_lake_store_uri: object + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). 
+ :type account_key: object :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Store account. Type: string (or Expression with resultType string). + Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Store account. + Data Lake Storage Gen2 account. :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or Expression with resultType - string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory - account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1620,7 +1782,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -1630,13 +1792,11 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1644,265 +1804,247 @@ def __init__( self, **kwargs ): - super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeStore' # type: str - self.data_lake_store_uri = kwargs['data_lake_store_uri'] + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.type = 'AzureBlobFS' # type: str + self.url = kwargs['url'] + self.account_key = kwargs.get('account_key', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) - self.account_name = kwargs.get('account_name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) self.encrypted_credential = 
kwargs.get('encrypted_credential', None) -class AzureFileStorageLinkedService(LinkedService): - """Azure File Storage linked service. +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type file_system: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureFileStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureFileStorage' # type: str - self.host = kwargs['host'] - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.type = 'AzureBlobFSLocation' # type: str + self.file_system = kwargs.get('file_system', None) -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", - "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). - :type function_name: object - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": - "application/json" }. Type: string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT - method, not allowed for GET method Type: string (or Expression with resultType string). 
- :type body: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureFunctionActivity, self).__init__(**kwargs) - self.type = 'AzureFunctionActivity' # type: str - self.method = kwargs['method'] - self.function_name = kwargs['function_name'] - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) + super(AzureBlobFSReadSettings, self).__init__(**kwargs) + self.type = 'AzureBlobFSReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. 
+class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the - format https://:code:``.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'function_app_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.type = 'AzureFunction' # type: str - self.function_app_url = kwargs['function_app_url'] - self.function_key = kwargs.get('function_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureBlobFSSink, self).__init__(**kwargs) + self.type = 'AzureBlobFSSink' # type: str + self.copy_behavior = kwargs.get('copy_behavior', None) -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). 
+ :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ _validation = { 'type': {'required': True}, - 'base_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.type = 'AzureKeyVault' # type: str - self.base_url = kwargs['base_url'] + super(AzureBlobFSSource, self).__init__(**kwargs) + self.type = 'AzureBlobFSSource' # type: str + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) -class SecretBase(msrest.serialization.Model): - """The base definition of a secret type. +class StoreWriteSettings(msrest.serialization.Model): + """Connector write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureKeyVaultSecretReference, SecureString. + sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -1910,64 +2052,70 @@ class SecretBase(msrest.serialization.Model): } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } _subtype_map = { - 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( self, **kwargs ): - super(SecretBase, self).__init__(**kwargs) - self.type = None # type: Optional[str] + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'StoreWriteSettings' # type: str + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.synapse.artifacts.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or - Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The default value is the - latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type = 'AzureKeyVaultSecret' # type: str - self.store = kwargs['store'] - self.secret_name = kwargs['secret_name'] - self.secret_version = kwargs.get('secret_version', None) + super(AzureBlobFSWriteSettings, self).__init__(**kwargs) + self.type = 'AzureBlobFSWriteSettings' # type: str + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. All required parameters must be populated in order to send to Azure. @@ -1984,15 +2132,33 @@ class AzureMariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. + :param connection_string: The connection string. It is mutually exclusive with sasUri, + serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + connectionString, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Data Warehouse. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: str """ _validation = { @@ -2007,230 +2173,185 @@ class AzureMariaDBLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(**kwargs) - self.type = 'AzureMariaDB' # type: str + super(AzureBlobStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureBlobStorage' # type: str self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.service_endpoint = kwargs.get('service_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or Expression with + resultType string). + :type container: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(**kwargs) - self.type = 'AzureMariaDBTable' # type: str - self.table_name = kwargs.get('table_name', None) + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.type = 'AzureBlobStorageLocation' # type: str + self.container = kwargs.get('container', None) -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service - endpoint. Keys must match the names of web service parameters defined in the published Azure ML - web service. Values will be passed in the GlobalParameters property of the Azure ML batch - execution request. 
- :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This - information will be passed in the WebServiceOutputs property of the Azure ML batch execution - request. - :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This - information will be passed in the WebServiceInputs property of the Azure ML batch execution - request. - :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) - self.type = 'AzureMLBatchExecution' # type: str - self.global_parameters = kwargs.get('global_parameters', None) - self.web_service_outputs = kwargs.get('web_service_outputs', None) - self.web_service_inputs = kwargs.get('web_service_inputs', None) + super(AzureBlobStorageReadSettings, self).__init__(**kwargs) + self.type = 'AzureBlobStorageReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureMLExecutePipelineActivity(ExecutionActivity): - """Azure ML Execute Pipeline activity. +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or - Expression with resultType string). - :type ml_pipeline_id: object - :param experiment_name: Run history experiment name of the pipeline run. This information will - be passed in the ExperimentName property of the published pipeline execution request. Type: - string (or Expression with resultType string). - :type experiment_name: object - :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline - endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. - Values will be passed in the ParameterAssignments property of the published pipeline execution - request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: object - :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be - passed in the ParentRunId property of the published pipeline execution request. Type: string - (or Expression with resultType string). - :type ml_parent_run_id: object - :param continue_on_step_failure: Whether to continue execution of other steps in the - PipelineRun if a step fails. This information will be passed in the continueOnStepFailure - property of the published pipeline execution request. Type: boolean (or Expression with - resultType boolean). - :type continue_on_step_failure: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, - 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, - 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, - 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, - 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMLExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'AzureMLExecutePipeline' # type: str - self.ml_pipeline_id = kwargs['ml_pipeline_id'] - self.experiment_name = kwargs.get('experiment_name', None) - self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) - self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) - self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) + super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) + self.type = 'AzureBlobStorageWriteSettings' # type: str + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) -class AzureMLLinkedService(LinkedService): - """Azure ML Studio Web Service linked service. +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. All required parameters must be populated in order to send to Azure. @@ -2247,104 +2368,55 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service - endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~azure.synapse.artifacts.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web - Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. 
Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMLLinkedService, self).__init__(**kwargs) - self.type = 'AzureML' # type: str - self.ml_endpoint = kwargs['ml_endpoint'] - self.api_key = kwargs['api_key'] - self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureMLServiceLinkedService(LinkedService): - """Azure ML Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or - Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Required. Azure ML Service workspace resource group name. 
Type: - string (or Expression with resultType string). - :type resource_group_name: object - :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :type ml_workspace_name: object - :param service_principal_id: The ID of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). 
+ :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2353,9 +2425,8 @@ class AzureMLServiceLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'subscription_id': {'required': True}, - 'resource_group_name': {'required': True}, - 'ml_workspace_name': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, } _attribute_map = { @@ -2365,12 +2436,19 @@ class AzureMLServiceLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -2378,19 +2456,29 @@ def __init__( self, **kwargs ): - super(AzureMLServiceLinkedService, self).__init__(**kwargs) - self.type = 'AzureMLService' # type: str - self.subscription_id = kwargs['subscription_id'] - self.resource_group_name = kwargs['resource_group_name'] - self.ml_workspace_name = kwargs['ml_workspace_name'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.type = 
'AzureDatabricks' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs['access_token'] + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.instance_pool_id = kwargs.get('instance_pool_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. All required parameters must be populated in order to send to Azure. @@ -2411,24 +2499,11 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in the Web Service - experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). 
- :type trained_model_file_path: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, } _attribute_map = { @@ -2440,56 +2515,83 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} } def __init__( self, **kwargs ): - super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) - self.type = 'AzureMLUpdateResource' # type: str - self.trained_model_name = kwargs['trained_model_name'] - self.trained_model_linked_service_name = kwargs['trained_model_linked_service_name'] - self.trained_model_file_path = kwargs['trained_model_file_path'] + super(ExecutionActivity, self).__init__(**kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) -class AzureMLWebServiceFile(msrest.serialization.Model): - """Azure ML WebService Input/Output file. +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. All required parameters must be populated in order to send to Azure. - :param file_path: Required. The relative file path, including container name, in the Azure Blob - Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure - ML WebService Input/Output file located. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. 
+ :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). + :type command_timeout: object """ _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, } _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = kwargs['file_path'] - self.linked_service_name = kwargs['linked_service_name'] + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = kwargs['command'] + self.command_timeout = kwargs.get('command_timeout', None) -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. All required parameters must be populated in order to send to Azure. @@ -2506,20 +2608,31 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). 
+ :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + """ + _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, } _attribute_map = { @@ -2529,144 +2642,149 @@ class AzureMySqlLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.type = 'AzureMySql' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = kwargs['endpoint'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.database = kwargs['database'] + self.tenant = kwargs['tenant'] -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Azure MySQL database table name. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The name of Azure MySQL database table. Type: string (or Expression with - resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description provided in a json + format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. + Type: boolean. 
+ :type flush_immediately: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureMySqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) + super(AzureDataExplorerSink, self).__init__(**kwargs) + self.type = 'AzureDataExplorerSink' # type: str + self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) + self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) + self.flush_immediately = kwargs.get('flush_immediately', None) -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: + string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether truncation is + applied to result-sets that go beyond a certain row-count limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object """ _validation = { 'type': {'required': True}, + 'query': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'AzurePostgreSql' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.type = 'AzureDataExplorerSource' # type: str + self.query = kwargs['query'] + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. All required parameters must be populated in order to send to Azure. @@ -2692,15 +2810,9 @@ class AzurePostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Azure PostgreSQL database which includes both schema - and table. Type: string (or Expression with resultType string). 
- :type table_name: object - :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with - resultType string). + :param table: The table name of the Azure Data Explorer database. Type: string (or Expression + with resultType string). :type table: object - :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -2718,86 +2830,106 @@ class AzurePostgreSqlTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlTable' # type: str - self.table_name = kwargs.get('table_name', None) + super(AzureDataExplorerTableDataset, self).__init__(**kwargs) + self.type = 'AzureDataExplorerTable' # type: str self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression - with resultType string). - :type index_name: object + :param account_name: Required. The Azure Data Lake Analytics account name. 
Type: string (or + Expression with resultType string). + :type account_name: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Analytics account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Analytics account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + :param subscription_id: Data Lake Analytics account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Analytics account resource group name (if different from + Data Factory account). Type: string (or Expression with resultType string). + :type resource_group_name: object + :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with + resultType string). + :type data_lake_analytics_uri: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index_name': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.type = 'AzureSearchIndex' # type: str - self.index_name = kwargs['index_name'] - - -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. - - All required parameters must be populated in order to send to Azure. 
+ super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataLakeAnalytics' # type: str + self.account_name = kwargs['account_name'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs['tenant'] + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. @@ -2812,11 +2944,27 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression + with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Store account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Store account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). - :type url: object - :param key: Admin Key for Azure Search service. - :type key: ~azure.synapse.artifacts.models.SecretBase + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory + account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type resource_group_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -2825,7 +2973,7 @@ class AzureSearchLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'url': {'required': True}, + 'data_lake_store_uri': {'required': True}, } _attribute_map = { @@ -2835,8 +2983,13 @@ class AzureSearchLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -2844,438 +2997,334 @@ def __init__( self, **kwargs ): - super(AzureSearchLinkedService, self).__init__(**kwargs) - self.type = 'AzureSearch' # type: str - self.url = kwargs['url'] - self.key = kwargs.get('key', None) + super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataLakeStore' # type: str + self.data_lake_store_uri = kwargs['data_lake_store_uri'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.account_name = kwargs.get('account_name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. 
- :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Database. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDatabase' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataLakeStoreLocation, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreLocation' # type: str -class AzureSqlDWLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlDWLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDW' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with - resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
+ :type enable_adls_single_file_parallel: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlDWTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlDWTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(AzureDataLakeStoreSink, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreSink' # type: str + self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Managed Instance. Type: string (or Expression with resultType string). 
- :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Managed Instance. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlMILinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlMI' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataLakeStoreSource, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreSource' # type: str + self.recursive = kwargs.get('recursive', None) -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. 
All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or - Expression with resultType string). - :type table: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlMITableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlMITable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' # type: str -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. +class Resource(msrest.serialization.Model): + """Resource. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string - (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL database. Type: string (or Expression with - resultType string). 
- :type table: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. +class AzureEntityResource(Resource): + """The resource model definition for a Azure Resource Manager resource with an etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None + + +class AzureFileStorageLinkedService(LinkedService): + """Azure File Storage linked service. All required parameters must be populated in order to send to Azure. @@ -3292,24 +3341,23 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
:type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { @@ -3319,118 +3367,91 @@ class AzureStorageLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureStorage' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) + super(AzureFileStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureFileStorage' # type: str + self.host = kwargs['host'] + self.user_id = kwargs.get('user_id', None) + self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. +class AzureFileStorageLocation(DatasetLocation): + """The location of file server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. 
Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. The table name of the Azure Table storage. Type: string (or - Expression with resultType string). - :type table_name: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureTableDataset, self).__init__(**kwargs) - self.type = 'AzureTable' # type: str - self.table_name = kwargs['table_name'] + super(AzureFileStorageLocation, self).__init__(**kwargs) + self.type = 'AzureFileStorageLocation' # type: str -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. +class AzureFileStorageReadSettings(StoreReadSettings): + """Azure File Storage read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression + with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { @@ -3440,125 +3461,159 @@ class AzureTableStorageLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureTableStorage' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureFileStorageReadSettings, self).__init__(**kwargs) + self.type = 'AzureFileStorageReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class BigDataPoolReference(msrest.serialization.Model): - """Big data pool reference. +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. All required parameters must be populated in order to send to Azure. - :param type: Required. Big data pool reference type. Possible values include: - "BigDataPoolReference". - :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType - :param reference_name: Required. Reference big data pool name. - :type reference_name: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. 
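For orientation, a minimal usage sketch of the Azure File Storage models added in this hunk, relying only on the kwargs-based constructors visible in the diff and assuming these classes are exported from azure.synapse.artifacts.models; SecureString is a SecretBase subclass defined elsewhere in _models.py and is assumed here to take a value keyword, and all literal values are illustrative placeholders.

from azure.synapse.artifacts.models import (
    AzureFileStorageLinkedService,
    AzureFileStorageReadSettings,
    SecureString,
)

# Linked service for an Azure File Storage account ('host' is the only required
# type property per the _validation map above).
file_storage_ls = AzureFileStorageLinkedService(
    host="myaccount.file.core.windows.net",             # placeholder host
    user_id="myuser",                                    # placeholder user
    password=SecureString(value="<storage-password>"),   # assumed SecureString(value=...)
)

# Read settings for a copy source: recurse into subfolders and match CSV files.
read_settings = AzureFileStorageReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    enable_partition_discovery=False,
)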
+ :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod + :param function_name: Required. Name of the Function that the Azure Function Activity will + call. Type: string (or Expression with resultType string). + :type function_name: object + :param headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :type body: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'reference_name': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, } def __init__( self, **kwargs ): - super(BigDataPoolReference, self).__init__(**kwargs) - self.type = kwargs['type'] - self.reference_name = kwargs['reference_name'] + super(AzureFunctionActivity, self).__init__(**kwargs) + self.type = 'AzureFunctionActivity' # type: str + self.method = kwargs['method'] + self.function_name = kwargs['function_name'] + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) -class BinaryDataset(Dataset): - """Binary dataset. +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the Binary storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression: The data compression method used for the binary dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + format https://:code:``.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'function_app_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(BinaryDataset, self).__init__(**kwargs) - self.type = 'Binary' # type: str - self.location = kwargs.get('location', None) - self.compression = kwargs.get('compression', None) + super(AzureFunctionLinkedService, self).__init__(**kwargs) + self.type = 'AzureFunction' # type: str + self.function_app_url = kwargs['function_app_url'] + self.function_key = kwargs.get('function_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. All required parameters must be populated in order to send to Azure. @@ -3575,29 +3630,14 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name for connection. Type: string (or Expression with resultType - string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression with resultType - integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net Type: string (or Expression with resultType string). 
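Similarly, a hedged sketch of wiring together the AzureFunctionLinkedService and AzureFunctionActivity models defined above, again using only the constructor keywords shown in this diff; the function app URL, activity name, and payload are placeholders.

from azure.synapse.artifacts.models import (
    AzureFunctionActivity,
    AzureFunctionLinkedService,
)

# Linked service pointing at the Function App (functionAppUrl is required).
function_ls = AzureFunctionLinkedService(
    function_app_url="https://myfunctionapp.azurewebsites.net",  # placeholder URL
)

# Activity that calls one function; 'name', 'method' and 'function_name' are
# required per the _validation map above.
call_function = AzureFunctionActivity(
    name="CallMyFunction",
    method="POST",
    function_name="HttpTriggerFunc",                   # placeholder function name
    headers={"Content-Type": "application/json"},
    body={"hello": "world"},
)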
+ :type base_url: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'base_url': {'required': True}, } _attribute_map = { @@ -3607,132 +3647,199 @@ class CassandraLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CassandraLinkedService, self).__init__(**kwargs) - self.type = 'Cassandra' # type: str - self.host = kwargs['host'] - self.authentication_type = kwargs.get('authentication_type', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureKeyVaultLinkedService, self).__init__(**kwargs) + self.type = 'AzureKeyVault' # type: str + self.base_url = kwargs['base_url'] -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. +class SecretBase(msrest.serialization.Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureKeyVaultSecretReference, SecureString. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} + } + + def __init__( + self, + **kwargs + ): + super(SecretBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.synapse.artifacts.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or + Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The default value is the + latest version of the secret. Type: string (or Expression with resultType string). 
+ :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.type = 'AzureKeyVaultSecret' # type: str + self.store = kwargs['store'] + self.secret_name = kwargs['secret_name'] + self.secret_version = kwargs.get('secret_version', None) + + +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Cassandra database. Type: string (or Expression with - resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
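A sketch of how the Key Vault models above compose: an AzureKeyVaultLinkedService plus an AzureKeyVaultSecretReference that other linked services can use wherever a SecretBase is expected. LinkedServiceReference is defined elsewhere in _models.py and is assumed here to accept 'type' and 'reference_name' keywords; names and URLs are placeholders.

from azure.synapse.artifacts.models import (
    AzureKeyVaultLinkedService,
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
)

# Linked service for the vault itself (baseUrl is the only required type property).
akv_ls = AzureKeyVaultLinkedService(base_url="https://myakv.vault.azure.net")

# Reference to a secret stored in that vault.
storage_key = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(
        type="LinkedServiceReference",               # assumed constant discriminator
        reference_name="MyKeyVaultLinkedService",    # placeholder linked service name
    ),
    secret_name="storage-account-key",               # placeholder secret name
)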
- :type keyspace: object + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CassandraTableDataset, self).__init__(**kwargs) - self.type = 'CassandraTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.keyspace = kwargs.get('keyspace', None) + super(AzureMariaDBLinkedService, self).__init__(**kwargs) + self.type = 'AzureMariaDB' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CloudError(msrest.serialization.Model): - """The object that defines the structure of an Azure Synapse error response. +class AzureMariaDBSource(TabularSource): + """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.synapse.artifacts.models.CloudError] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CloudError, self).__init__(**kwargs) - self.code = kwargs['code'] - self.message = kwargs['message'] - self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) + super(AzureMariaDBSource, self).__init__(**kwargs) + self.type = 'AzureMariaDBSource' # type: str + self.query = kwargs.get('query', None) -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. @@ -3758,9 +3865,8 @@ class CommonDataServiceForAppsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object """ _validation = { @@ -3778,317 +3884,329 @@ class CommonDataServiceForAppsEntityDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsEntity' # type: str - self.entity_name = kwargs.get('entity_name', None) + super(AzureMariaDBTableDataset, self).__init__(**kwargs) + self.type = 'AzureMariaDBTable' # type: str + self.table_name = kwargs.get('table_name', None) -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. 
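The Azure Database for MariaDB models above follow the same pattern; a hedged sketch combining the linked service, dataset, and copy source, with LinkedServiceReference again assumed to take 'type' and 'reference_name', and the connection string, table, and query values purely illustrative.

from azure.synapse.artifacts.models import (
    AzureMariaDBLinkedService,
    AzureMariaDBSource,
    AzureMariaDBTableDataset,
    LinkedServiceReference,
)

# Linked service using a plain ODBC connection string (could also use 'pwd' with
# an AzureKeyVaultSecretReference as sketched earlier).
mariadb_ls = AzureMariaDBLinkedService(
    connection_string="Server=myserver;Port=3306;Database=mydb;Uid=myuser;",  # placeholder
)

# Dataset bound to that linked service by name.
mariadb_ds = AzureMariaDBTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="MyMariaDBLinkedService",     # placeholder name
    ),
    table_name="sales",                              # placeholder table
)

# Copy-activity source issuing an explicit query instead of reading the whole table.
mariadb_source = AzureMariaDBSource(query="SELECT * FROM sales WHERE year = 2020")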
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Common Data Service for Apps - instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common - Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for Apps server. The - property is required for on-prem and not allowed for online. Type: string (or Expression with - resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. The property is - required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression - with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property - is required for on-line and not allowed for on-prem. Type: string (or Expression with - resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service for Apps instance. - The property is required for on-prem and required for online when there are more than one - Common Data Service for Apps instances associated with the user. Type: string (or Expression - with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Common Data Service - for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Common Data Service for Apps instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. 
If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service + endpoint. Keys must match the names of web service parameters defined in the published Azure ML + web service. Values will be passed in the GlobalParameters property of the Azure ML batch + execution request. + :type global_parameters: dict[str, object] + :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This + information will be passed in the WebServiceOutputs property of the Azure ML batch execution + request. + :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This + information will be passed in the WebServiceInputs property of the Azure ML batch execution + request. 
+ :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, } def __init__( self, **kwargs ): - super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) - self.type = 'CommonDataServiceForApps' # type: str - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) + self.type = 'AzureMLBatchExecution' # type: str + self.global_parameters = kwargs.get('global_parameters', None) + self.web_service_outputs = kwargs.get('web_service_outputs', None) + self.web_service_inputs = kwargs.get('web_service_inputs', None) -class ConcurLinkedService(LinkedService): - """Concur Service linked service. 
+class AzureMLExecutePipelineActivity(ExecutionActivity): + """Azure ML Execute Pipeline activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or + Expression with resultType string). + :type ml_pipeline_id: object + :param experiment_name: Run history experiment name of the pipeline run. This information will + be passed in the ExperimentName property of the published pipeline execution request. Type: + string (or Expression with resultType string). + :type experiment_name: object + :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline + endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. + Values will be passed in the ParameterAssignments property of the published pipeline execution + request. 
Type: object with key value pairs (or Expression with resultType object). + :type ml_pipeline_parameters: object + :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be + passed in the ParentRunId property of the published pipeline execution request. Type: string + (or Expression with resultType string). + :type ml_parent_run_id: object + :param continue_on_step_failure: Whether to continue execution of other steps in the + PipelineRun if a step fails. This information will be passed in the continueOnStepFailure + property of the published pipeline execution request. Type: boolean (or Expression with + resultType boolean). + :type continue_on_step_failure: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, + 'ml_pipeline_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, + 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, + 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ConcurLinkedService, self).__init__(**kwargs) - self.type = 'Concur' # type: str - self.client_id = kwargs['client_id'] - self.username = kwargs['username'] - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureMLExecutePipelineActivity, self).__init__(**kwargs) + self.type = 'AzureMLExecutePipeline' # type: str + self.ml_pipeline_id = kwargs['ml_pipeline_id'] + self.experiment_name = kwargs.get('experiment_name', None) + self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) + self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) + 
self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) -class ConcurObjectDataset(Dataset): - """Concur Service dataset. +class AzureMLLinkedService(LinkedService): + """Azure ML Studio Web Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model endpoint. + :type api_key: ~azure.synapse.artifacts.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web + Service endpoint. Type: string (or Expression with resultType string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
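Of the Azure ML activities above, AzureMLExecutePipelineActivity uses only scalar type properties, so a sketch needs nothing beyond the keywords listed in this diff; the pipeline id, experiment name, and parameter values are placeholders.

from azure.synapse.artifacts.models import AzureMLExecutePipelineActivity

# Activity that triggers a published Azure ML pipeline; 'name' and 'ml_pipeline_id'
# are required per the _validation map above.
run_ml_pipeline = AzureMLExecutePipelineActivity(
    name="RunPublishedPipeline",
    ml_pipeline_id="00000000-0000-0000-0000-000000000000",   # placeholder pipeline id
    experiment_name="nightly-training",                       # placeholder experiment
    ml_pipeline_parameters={"learning_rate": "0.01"},         # placeholder parameters
    continue_on_step_failure=False,
)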
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ConcurObjectDataset, self).__init__(**kwargs) - self.type = 'ConcurObject' # type: str - self.table_name = kwargs.get('table_name', None) + super(AzureMLLinkedService, self).__init__(**kwargs) + self.type = 'AzureML' # type: str + self.ml_endpoint = kwargs['ml_endpoint'] + self.api_key = kwargs['api_key'] + self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. +class AzureMLServiceLinkedService(LinkedService): + """Azure ML Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. 
+ :type annotations: list[object] + :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or + Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: + string (or Expression with resultType string). + :type resource_group_name: object + :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or + Expression with resultType string). + :type ml_workspace_name: object + :param service_principal_id: The ID of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, + 'subscription_id': {'required': True}, + 'resource_group_name': {'required': True}, + 'ml_workspace_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' # type: str + super(AzureMLServiceLinkedService, self).__init__(**kwargs) + self.type = 'AzureMLService' # type: str + self.subscription_id = kwargs['subscription_id'] + self.resource_group_name = kwargs['resource_group_name'] + self.ml_workspace_name = kwargs['ml_workspace_name'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CopyActivity(ExecutionActivity): - """Copy activity. 
+class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. All required parameters must be populated in order to send to Azure. @@ -4109,46 +4227,24 @@ class CopyActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param source: Required. Copy activity source. - :type source: ~azure.synapse.artifacts.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.synapse.artifacts.models.CopySink - :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim staging. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when EnableStaging is true. - :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to - avoid overloading the data store. Type: integer (or Expression with resultType integer), - minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units that can be used to - perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. - Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row settings when - EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] + :param trained_model_name: Required. Name of the Trained Model module in the Web Service + experiment to be updated. Type: string (or Expression with resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage linked service + holding the .ilearner file that will be uploaded by the update operation. + :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService + to represent the .ilearner file that will be uploaded by the update operation. Type: string + (or Expression with resultType string). 
+ :type trained_model_file_path: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, } _attribute_map = { @@ -4160,47 +4256,113 @@ class CopyActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CopyActivity, self).__init__(**kwargs) - self.type = 'Copy' # type: str - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.source = kwargs['source'] - self.sink = kwargs['sink'] - self.translator = kwargs.get('translator', None) - self.enable_staging = kwargs.get('enable_staging', None) - self.staging_settings = kwargs.get('staging_settings', None) - self.parallel_copies = kwargs.get('parallel_copies', None) - self.data_integration_units = kwargs.get('data_integration_units', None) - self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) - self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) - self.preserve_rules = kwargs.get('preserve_rules', None) - self.preserve = kwargs.get('preserve', None) + super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) + self.type = 'AzureMLUpdateResource' # type: str + self.trained_model_name = kwargs['trained_model_name'] + self.trained_model_linked_service_name = kwargs['trained_model_linked_service_name'] + self.trained_model_file_path = kwargs['trained_model_file_path'] -class CopySink(msrest.serialization.Model): - """A copy activity sink. +class AzureMLWebServiceFile(msrest.serialization.Model): + """Azure ML WebService Input/Output file. + + All required parameters must be populated in order to send to Azure. + + :param file_path: Required. 
The relative file path, including container name, in the Azure Blob + Storage specified by the LinkedService. Type: string (or Expression with resultType string). + :type file_path: object + :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure + ML WebService Input/Output file located. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + """ + + _validation = { + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = kwargs['file_path'] + self.linked_service_name = kwargs['linked_service_name'] - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureMySqlLinkedService, self).__init__(**kwargs) + self.type = 'AzureMySql' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -4224,6 +4386,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object """ _validation = { @@ -4238,31 +4403,20 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySink' # type: str - self.write_batch_size = kwargs.get('write_batch_size', None) - self.write_batch_timeout = kwargs.get('write_batch_timeout', None) - self.sink_retry_count = kwargs.get('sink_retry_count', None) - self.sink_retry_wait = kwargs.get('sink_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - + super(AzureMySqlSink, self).__init__(**kwargs) + self.type = 'AzureMySqlSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class CopySource(msrest.serialization.Model): - """A copy activity source. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . +class AzureMySqlSource(TabularSource): + """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -4280,6 +4434,11 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object """ _validation = { @@ -4292,92 +4451,21 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} - } - - def __init__( - self, - **kwargs - ): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySource' # type: str - self.source_retry_count = kwargs.get('source_retry_count', None) - self.source_retry_wait = kwargs.get('source_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - - -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or - Expression with resultType string). - :type account_endpoint: object - :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: object - :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or - AzureKeyVaultSecretReference. - :type account_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CosmosDbLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDb' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.account_endpoint = kwargs.get('account_endpoint', None) - self.database = kwargs.get('database', None) - self.account_key = kwargs.get('account_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureMySqlSource, self).__init__(**kwargs) + self.type = 'AzureMySqlSource' # type: str + self.query = kwargs.get('query', None) -class CosmosDbMongoDbApiCollectionDataset(Dataset): - """The CosmosDB (MongoDB API) database dataset. +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. @@ -4403,15 +4491,17 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: - string (or Expression with resultType string). - :type collection: object + :param table_name: The Azure MySQL database table name. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The name of Azure MySQL database table. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'collection': {'required': True}, } _attribute_map = { @@ -4424,20 +4514,22 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApiCollection' # type: str - self.collection = kwargs['collection'] + super(AzureMySqlTableDataset, self).__init__(**kwargs) + self.type = 'AzureMySqlTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) -class CosmosDbMongoDbApiLinkedService(LinkedService): - """Linked service for CosmosDB (MongoDB API) data source. +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -4454,19 +4546,19 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to - access. Type: string (or Expression with resultType string). - :type database: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, } _attribute_map = { @@ -4477,106 +4569,100 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApi' # type: str - self.connection_string = kwargs['connection_string'] - self.database = kwargs['database'] + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) + self.type = 'AzurePostgreSql' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CosmosDbSqlApiCollectionDataset(Dataset): - """Microsoft Azure CosmosDB (SQL API) Collection dataset. +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type collection_name: object + :type pre_copy_script: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiCollection' # type: str - self.collection_name = kwargs['collection_name'] + super(AzurePostgreSqlSink, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class CouchbaseLinkedService(LinkedService): - """Couchbase server linked service. +class AzurePostgreSqlSource(TabularSource): + """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { @@ -4586,28 +4672,24 @@ class CouchbaseLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CouchbaseLinkedService, self).__init__(**kwargs) - self.type = 'Couchbase' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.cred_string = kwargs.get('cred_string', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzurePostgreSqlSource, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlSource' # type: str + self.query = kwargs.get('query', None) -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. @@ -4633,11 +4715,18 @@ class CouchbaseTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: The table name of the Azure PostgreSQL database which includes both schema + and table. Type: string (or Expression with resultType string). :type table_name: object - """ - - _validation = { + :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with + resultType string). 
+ :type table: object + :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + """ + + _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, } @@ -4653,203 +4742,186 @@ class CouchbaseTableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CouchbaseTableDataset, self).__init__(**kwargs) - self.type = 'CouchbaseTable' # type: str + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for creating data flow debug session. - - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param existing_cluster_id: The ID of existing Databricks cluster. - :type existing_cluster_id: str - :param cluster_timeout: Timeout setting for Databricks cluster. - :type cluster_timeout: int - :param new_cluster_name: The name of new Databricks cluster. - :type new_cluster_name: str - :param new_cluster_node_type: The type of new Databricks cluster. - :type new_cluster_node_type: str - :param data_bricks_linked_service: Data bricks linked service. - :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource - """ - - _attribute_map = { - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, - 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, - 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, - 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, - 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, - } - - def __init__( - self, - **kwargs - ): - super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.cluster_timeout = kwargs.get('cluster_timeout', None) - self.new_cluster_name = kwargs.get('new_cluster_name', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.data_bricks_linked_service = kwargs.get('data_bricks_linked_service', None) - - -class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): - """Response body structure for creating data flow debug session. - - :param session_id: The ID of data flow debug session. - :type session_id: str - """ - - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - - -class CreateRunResponse(msrest.serialization.Model): - """Response body with a run identifier. +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. 
All required parameters must be populated in order to send to Azure. - :param run_id: Required. Identifier of a run. - :type run_id: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object """ _validation = { - 'run_id': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = kwargs['run_id'] + super(AzureQueueSink, self).__init__(**kwargs) + self.type = 'AzureQueueSink' # type: str -class CustomActivity(ExecutionActivity): - """Custom activity type. +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
:type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or Expression with - resultType string). - :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_objects: Reference objects. - :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no restriction on the keys or - values that can be used. The user specified custom activity has the full responsibility to - consume and interpret the content defined. - :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted for custom activity. - Type: double (or Expression with resultType double). - :type retention_time_in_days: object + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression + with resultType string). + :type index_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, + 'linked_service_name': {'required': True}, + 'index_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CustomActivity, self).__init__(**kwargs) - self.type = 'Custom' # type: str - self.command = kwargs['command'] - 
self.resource_linked_service = kwargs.get('resource_linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_objects = kwargs.get('reference_objects', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + super(AzureSearchIndexDataset, self).__init__(**kwargs) + self.type = 'AzureSearchIndex' # type: str + self.index_name = kwargs['index_name'] -class CustomActivityReferenceObject(msrest.serialization.Model): - """Reference objects for custom activity. +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. - :param linked_services: Linked service references. - :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Specify the write behavior when upserting documents into Azure Search + Index. Possible values include: "Merge", "Upload". 
+ :type write_behavior: str or ~azure.synapse.artifacts.models.AzureSearchIndexWriteBehaviorType """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__( self, **kwargs ): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = kwargs.get('linked_services', None) - self.datasets = kwargs.get('datasets', None) + super(AzureSearchIndexSink, self).__init__(**kwargs) + self.type = 'AzureSearchIndexSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. All required parameters must be populated in order to send to Azure. @@ -4866,13 +4938,20 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type_properties: Required. Custom linked service properties. - :type type_properties: object + :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType + string). + :type url: object + :param key: Admin Key for Azure Search service. + :type key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'type_properties': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -4882,653 +4961,913 @@ class CustomDataSourceLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, - } + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } def __init__( self, **kwargs ): - super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type = 'CustomDataSource' # type: str - self.type_properties = kwargs['type_properties'] + super(AzureSearchLinkedService, self).__init__(**kwargs) + self.type = 'AzureSearch' # type: str + self.url = kwargs['url'] + self.key = kwargs.get('key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. 
+class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks - Workspace. This path must begin with a slash. Type: string (or Expression with resultType - string). - :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this job.If the notebook - takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Database. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Database. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'notebook_path': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatabricksNotebookActivity, self).__init__(**kwargs) - self.type = 'DatabricksNotebook' # type: str - self.notebook_path = kwargs['notebook_path'] - self.base_parameters = kwargs.get('base_parameters', None) - self.libraries = kwargs.get('libraries', None) + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlDatabase' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
- :param linked_service_name: Linked service reference.
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
- :param policy: Activity policy.
- :type policy: ~azure.synapse.artifacts.models.ActivityPolicy
- :param main_class_name: Required. The full name of the class containing the main method to be
- executed. This class must be contained in a JAR provided as a library. Type: string (or
- Expression with resultType string).
- :type main_class_name: object
- :param parameters: Parameters that will be passed to the main method.
- :type parameters: list[object]
- :param libraries: A list of libraries to be installed on the cluster that will execute the job.
- :type libraries: list[dict[str, object]]
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the linked service.
+ :type annotations: list[object]
+ :param connection_string: Required. The connection string. Type: string, SecureString or
+ AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param password: The Azure key vault secret reference of password in connection string.
+ :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
+ :param service_principal_id: The ID of the service principal used to authenticate against Azure
+ SQL Data Warehouse. Type: string (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to authenticate against
+ Azure SQL Data Warehouse.
+ :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase
+ :param tenant: The name or ID of the tenant to which the service principal belongs. Type:
+ string (or Expression with resultType string).
+ :type tenant: object
+ :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+ encrypted using the integration runtime credential manager. Type: string (or Expression with
+ resultType string). 
+ :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'main_class_name': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkJar' # type: str - self.main_class_name = kwargs['main_class_name'] - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) + super(AzureSqlDWLinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlDW' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. DBFS paths are - supported. Type: string (or Expression with resultType string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'python_file': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkPython' # type: str - self.python_file = kwargs['python_file'] - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) + super(AzureSqlDWTableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlDWTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class DataFlow(msrest.serialization.Model): - """Azure Synapse nested object which contains a flow with data movements and transformations. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MappingDataFlow. +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: The description of the data flow. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param annotations: List of tags that can be used for describing the data flow. + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. 
- :type folder: ~azure.synapse.artifacts.models.DataFlowFolder + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Managed Instance. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Managed Instance. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - } - - _subtype_map = { - 'type': {'MappingDataFlow': 'MappingDataFlow'} + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlow, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.description = kwargs.get('description', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlMI' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DataFlowDebugCommandRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. - :param session_id: Required. The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. 
- :type data_flow_name: str - :param command_name: The command name. - :type command_name: str - :param command_payload: Required. The command payload object. - :type command_payload: object + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { - 'session_id': {'required': True}, - 'command_payload': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'command_name': {'key': 'commandName', 'type': 'str'}, - 'command_payload': {'key': 'commandPayload', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugCommandRequest, self).__init__(**kwargs) - self.session_id = kwargs['session_id'] - self.data_flow_name = kwargs.get('data_flow_name', None) - self.command_name = kwargs.get('command_name', None) - self.command_payload = kwargs['command_payload'] + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlMITable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class DataFlowDebugCommandResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugCommandResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.data = kwargs.get('data', None) + super(AzureSqlSink, self).__init__(**kwargs) + self.type = 'AzureSqlSink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) -class DataFlowDebugPackage(msrest.serialization.Model): - """Request body structure for starting data flow debug session. +class AzureSqlSource(TabularSource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow: Data flow instance. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource - :param datasets: List of datasets. 
- :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] - :param staging: Staging info for debug session. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param debug_settings: Data flow debug settings. - :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugPackage, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.session_id = kwargs.get('session_id', None) - self.data_flow = kwargs.get('data_flow', None) - self.datasets = kwargs.get('datasets', None) - self.linked_services = kwargs.get('linked_services', None) - self.staging = kwargs.get('staging', None) - self.debug_settings = kwargs.get('debug_settings', None) + super(AzureSqlSource, self).__init__(**kwargs) + self.type = 'AzureSqlSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) -class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): - """Data flow debug settings. +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL database. Type: string (or Expression with + resultType string). + :type table: object """ - _attribute_map = { - 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } - def __init__( - self, - **kwargs - ): - super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) - self.source_settings = kwargs.get('source_settings', None) - self.parameters = kwargs.get('parameters', None) - self.dataset_parameters = kwargs.get('dataset_parameters', None) - - -class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): - """Request body structure for data flow preview data. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request. 
- :type row_limits: int - """ - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.stream_name = kwargs.get('stream_name', None) - self.row_limits = kwargs.get('row_limits', None) + super(AzureSqlTableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class DataFlowDebugQueryResponse(msrest.serialization.Model): - """Response body structure of data flow query for data preview, statistics or expression preview. +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. - :param run_id: The run ID of data flow debug session. - :type run_id: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. 
+ :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataFlowDebugQueryResponse, self).__init__(**kwargs) - self.run_id = kwargs.get('run_id', None) + super(AzureStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureStorage' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class SubResourceDebugResource(msrest.serialization.Model): - """Azure Synapse nested debug resource. +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. - :param name: The resource name. - :type name: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: Required. The table name of the Azure Table storage. Type: string (or + Expression with resultType string). 
+ :type table_name: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SubResourceDebugResource, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + super(AzureTableDataset, self).__init__(**kwargs) + self.type = 'AzureTable' # type: str + self.table_name = kwargs['table_name'] -class DataFlowDebugResource(SubResourceDebugResource): - """Data flow debug resource. +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: + string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or + Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + resultType string). 
+ :type azure_table_insert_type: object """ _validation = { - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(AzureTableSink, self).__init__(**kwargs) + self.type = 'AzureTableSink' # type: str + self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) + self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) + self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) + self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) -class DataFlowDebugResultResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. +class AzureTableSource(TabularSource): + """A copy activity Azure Table source. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugResultResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.data = kwargs.get('data', None) - - -class DataFlowDebugSessionInfo(msrest.serialization.Model): - """Data flow debug session info. + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param compute_type: Compute type of the cluster. - :type compute_type: str - :param core_count: Core count of the cluster. - :type core_count: int - :param node_count: Node count of the cluster. (deprecated property). - :type node_count: int - :param integration_runtime_name: Attached integration runtime name of data flow debug session. - :type integration_runtime_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param start_time: Start time of data flow debug session. - :type start_time: str - :param time_to_live_in_minutes: Compute type of the cluster. - :type time_to_live_in_minutes: int - :param last_activity_time: Last activity time of data flow debug session. 
- :type last_activity_time: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, - 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugSessionInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) - self.node_count = kwargs.get('node_count', None) - self.integration_runtime_name = kwargs.get('integration_runtime_name', None) - self.session_id = kwargs.get('session_id', None) - self.start_time = kwargs.get('start_time', None) - self.time_to_live_in_minutes = kwargs.get('time_to_live_in_minutes', None) - self.last_activity_time = kwargs.get('last_activity_time', None) - - -class DataFlowDebugStatisticsRequest(msrest.serialization.Model): - """Request body structure for data flow statistics. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param columns: List of column names. - :type columns: list[str] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param azure_table_source_query: Azure Table source query. Type: string (or Expression with + resultType string). + :type azure_table_source_query: object + :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + Type: boolean (or Expression with resultType boolean). 
+ :type azure_table_source_ignore_table_not_found: object """ - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'columns': {'key': 'columns', 'type': '[str]'}, + _validation = { + 'type': {'required': True}, } - def __init__( - self, - **kwargs - ): - super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.stream_name = kwargs.get('stream_name', None) - self.columns = kwargs.get('columns', None) - - -class DataFlowFolder(msrest.serialization.Model): - """The folder that this data flow is in. If not specified, Data flow will appear at the root level. - - :param name: The name of the folder that this data flow is in. - :type name: str - """ - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, + 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + super(AzureTableSource, self).__init__(**kwargs) + self.type = 'AzureTableSource' # type: str + self.azure_table_source_query = kwargs.get('azure_table_source_query', None) + self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) -class DataFlowListResponse(msrest.serialization.Model): - """A list of data flow resources. +class AzureTableStorageLinkedService(LinkedService): + """The azure table storage linked service. All required parameters must be populated in order to send to Azure. - :param value: Required. List of data flows. - :type value: list[~azure.synapse.artifacts.models.DataFlowResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. 
+ :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataFlowListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) + super(AzureTableStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureTableStorage' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DataFlowReference(msrest.serialization.Model): - """Data flow reference type. +class BigDataPoolReference(msrest.serialization.Model): + """Big data pool reference. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". - :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType - :param reference_name: Required. Reference data flow name. + :param type: Required. Big data pool reference type. Possible values include: + "BigDataPoolReference". + :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType + :param reference_name: Required. Reference big data pool name. :type reference_name: str - :param dataset_parameters: Reference data flow parameters from dataset. 
- :type dataset_parameters: object """ _validation = { @@ -5537,438 +5876,614 @@ class DataFlowReference(msrest.serialization.Model): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowReference, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) + super(BigDataPoolReference, self).__init__(**kwargs) self.type = kwargs['type'] self.reference_name = kwargs['reference_name'] - self.dataset_parameters = kwargs.get('dataset_parameters', None) -class SubResource(msrest.serialization.Model): - """Azure Synapse nested resource, which belongs to a workspace. +class TrackedResource(Resource): + """The resource model definition for a ARM tracked top level resource. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'etag': {'readonly': True}, + 'location': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None + super(TrackedResource, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.location = kwargs['location'] -class DataFlowResource(SubResource): - """Data flow resource type. +class BigDataPoolResourceInfo(TrackedResource): + """A Big Data pool. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Data flow properties. 
- :type properties: ~azure.synapse.artifacts.models.DataFlow + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param provisioning_state: The state of the Big Data pool. + :type provisioning_state: str + :param auto_scale: Auto-scaling properties. + :type auto_scale: ~azure.synapse.artifacts.models.AutoScaleProperties + :param creation_date: The time when the Big Data pool was created. + :type creation_date: ~datetime.datetime + :param auto_pause: Auto-pausing properties. + :type auto_pause: ~azure.synapse.artifacts.models.AutoPauseProperties + :param is_compute_isolation_enabled: Whether compute isolation is required or not. + :type is_compute_isolation_enabled: bool + :param spark_events_folder: The Spark events folder. + :type spark_events_folder: str + :param node_count: The number of nodes in the Big Data pool. + :type node_count: int + :param library_requirements: Library version requirements. + :type library_requirements: ~azure.synapse.artifacts.models.LibraryRequirements + :param spark_version: The Apache Spark version. + :type spark_version: str + :param default_spark_log_folder: The default folder where Spark logs will be written. + :type default_spark_log_folder: str + :param node_size: The level of compute power that each node in the Big Data pool has. Possible + values include: "None", "Small", "Medium", "Large", "XLarge", "XXLarge". + :type node_size: str or ~azure.synapse.artifacts.models.NodeSize + :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values + include: "None", "MemoryOptimized". + :type node_size_family: str or ~azure.synapse.artifacts.models.NodeSizeFamily """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'location': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'auto_scale': {'key': 'properties.autoScale', 'type': 'AutoScaleProperties'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, + 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, + 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, + 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, + 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, + 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, + 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, + 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, + 'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataFlowResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - + super(BigDataPoolResourceInfo, self).__init__(**kwargs) + self.provisioning_state = kwargs.get('provisioning_state', None) + self.auto_scale = 
kwargs.get('auto_scale', None) + self.creation_date = kwargs.get('creation_date', None) + self.auto_pause = kwargs.get('auto_pause', None) + self.is_compute_isolation_enabled = kwargs.get('is_compute_isolation_enabled', None) + self.spark_events_folder = kwargs.get('spark_events_folder', None) + self.node_count = kwargs.get('node_count', None) + self.library_requirements = kwargs.get('library_requirements', None) + self.spark_version = kwargs.get('spark_version', None) + self.default_spark_log_folder = kwargs.get('default_spark_log_folder', None) + self.node_size = kwargs.get('node_size', None) + self.node_size_family = kwargs.get('node_size_family', None) -class Transformation(msrest.serialization.Model): - """A data flow transformation. - All required parameters must be populated in order to send to Azure. +class BigDataPoolResourceInfoListResult(msrest.serialization.Model): + """Collection of Big Data pool information. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of Big Data pools. + :type value: list[~azure.synapse.artifacts.models.BigDataPoolResourceInfo] """ - _validation = { - 'name': {'required': True}, - } - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[BigDataPoolResourceInfo]'}, } def __init__( self, **kwargs ): - super(Transformation, self).__init__(**kwargs) - self.name = kwargs['name'] - self.description = kwargs.get('description', None) + super(BigDataPoolResourceInfoListResult, self).__init__(**kwargs) + self.next_link = kwargs.get('next_link', None) + self.value = kwargs.get('value', None) -class DataFlowSink(Transformation): - """Transformation for data flow sink. +class BinaryDataset(Dataset): + """Binary dataset. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the Binary storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression: The data compression method used for the binary dataset. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__( self, **kwargs ): - super(DataFlowSink, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) + super(BinaryDataset, self).__init__(**kwargs) + self.type = 'Binary' # type: str + self.location = kwargs.get('location', None) + self.compression = kwargs.get('compression', None) -class DataFlowSource(Transformation): - """Transformation for data flow source. +class BinarySink(CopySink): + """A copy activity Binary sink. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. 
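The BinaryDataset model generated above only requires linked_service_name; a minimal construction sketch follows, assuming LinkedServiceReference takes the same (type, reference_name) pair as the other *Reference models in this package, with a hypothetical linked service name.

from azure.synapse.artifacts.models import BinaryDataset, LinkedServiceReference

# The 'type' discriminator ('Binary') is filled in by the constructor itself.
binary_dataset = BinaryDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",                # assumed reference-type value
        reference_name="MyBlobStorageLinkedService",  # hypothetical artifact name
    ),
    description="Raw files copied byte-for-byte",
)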
+ :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, **kwargs ): - super(DataFlowSource, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) + super(BinarySink, self).__init__(**kwargs) + self.type = 'BinarySink' # type: str + self.store_settings = kwargs.get('store_settings', None) -class DataFlowSourceSetting(msrest.serialization.Model): - """Definition of data flow source setting for debug. +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param source_name: The data flow source name. - :type source_name: str - :param row_limit: Defines the row limit of data flow source in debug. - :type row_limit: int + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_name': {'key': 'sourceName', 'type': 'str'}, - 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, **kwargs ): - super(DataFlowSourceSetting, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.source_name = kwargs.get('source_name', None) - self.row_limit = kwargs.get('row_limit', None) - - -class DataFlowStagingInfo(msrest.serialization.Model): - """Staging info for execute data flow activity. - - :param linked_service: Staging linked service reference. 
- :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for staging blob. - :type folder_path: str - """ + super(BinarySource, self).__init__(**kwargs) + self.type = 'BinarySource' # type: str + self.store_settings = kwargs.get('store_settings', None) - _attribute_map = { - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'folderPath', 'type': 'str'}, - } - def __init__( - self, - **kwargs - ): - super(DataFlowStagingInfo, self).__init__(**kwargs) - self.linked_service = kwargs.get('linked_service', None) - self.folder_path = kwargs.get('folder_path', None) +class Trigger(msrest.serialization.Model): + """Azure Synapse nested object which contains information about creating pipeline run. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Trigger description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. - Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. - Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should be selected to run - first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or - Expression with resultType integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression - with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, - Full and SingleBox. Type: string (or Expression with resultType string). 
- :type compilation_mode: object + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} } def __init__( self, **kwargs ): - super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) - self.type = 'DataLakeAnalyticsU-SQL' # type: str - self.script_path = kwargs['script_path'] - self.script_linked_service = kwargs['script_linked_service'] - self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) - self.priority = kwargs.get('priority', None) - self.parameters = kwargs.get('parameters', None) - self.runtime_version = kwargs.get('runtime_version', None) - self.compilation_mode = kwargs.get('compilation_mode', None) + super(Trigger, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'Trigger' # type: str + self.description = kwargs.get('description', None) + self.runtime_state = None + self.annotations = kwargs.get('annotations', None) -class DatasetCompression(msrest.serialization.Model): - """The compression method used on a dataset. +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. 
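The _subtype_map emitted on the Trigger base class above is what drives polymorphic (de)serialization: loading a payload through the base class dispatches on the 'type' discriminator to the matching subclass. A rough sketch, assuming msrest's generic Model.deserialize/serialize helpers and a BlobEventsTrigger payload; the event value and resource ID are placeholders.

from azure.synapse.artifacts.models import BlobEventsTrigger, Trigger

payload = {
    "type": "BlobEventsTrigger",
    "typeProperties": {
        "events": ["Microsoft.Storage.BlobCreated"],   # assumed BlobEventTypes value
        "scope": "/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account>",
    },
}

trigger = Trigger.deserialize(payload)   # dispatches via _subtype_map
assert isinstance(trigger, BlobEventsTrigger)
wire = trigger.serialize()               # nests the typeProperties.* keys back into the wire shape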
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( self, **kwargs ): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetCompression' # type: str + super(MultiplePipelineTrigger, self).__init__(**kwargs) + self.type = 'MultiplePipelineTrigger' # type: str + self.pipelines = kwargs.get('pipelines', None) -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. 
At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :type ignore_empty_blobs: bool + :param events: Required. The type of events that cause this trigger to fire. + :type events: list[str or ~azure.synapse.artifacts.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DatasetBZip2Compression, self).__init__(**kwargs) - self.type = 'BZip2' # type: str + super(BlobEventsTrigger, self).__init__(**kwargs) + self.type = 'BlobEventsTrigger' # type: str + self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) + self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) + self.ignore_empty_blobs = kwargs.get('ignore_empty_blobs', None) + self.events = kwargs['events'] + self.scope = kwargs['scope'] -class DatasetDebugResource(SubResourceDebugResource): - """Dataset debug resource. +class BlobSink(CopySink): + """A copy activity Azure Blob sink. All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression + with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatasetDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(BlobSink, self).__init__(**kwargs) + self.type = 'BlobSink' # type: str + self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) + self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) + self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) + self.copy_behavior = kwargs.get('copy_behavior', None) -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. +class BlobSource(CopySource): + """A copy activity Azure Blob source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ _validation = { @@ -5978,118 +6493,191 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatasetDeflateCompression, self).__init__(**kwargs) - self.type = 'Deflate' # type: str - self.level = kwargs.get('level', None) - - -class DatasetFolder(msrest.serialization.Model): - """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } + super(BlobSource, self).__init__(**kwargs) + self.type = 'BlobSource' # type: str + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) - def __init__( - self, - **kwargs - ): - super(DatasetFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. 
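BlobSource and BlobSink above are the two Blob-specific endpoints of a copy activity; a construction sketch follows (in practice they would be assigned to a copy activity's source and sink, which sit outside this excerpt, and the copy_behavior value is a hypothetical example since that property is an untyped object).

from azure.synapse.artifacts.models import BlobSink, BlobSource

source = BlobSource(
    recursive=True,              # walk nested folders under the path
    skip_header_line_count=1,    # drop one header row per blob
    treat_empty_as_null=True,
)

sink = BlobSink(
    blob_writer_add_header=True,
    blob_writer_overwrite_files=True,
    copy_behavior="PreserveHierarchy",   # hypothetical value; the property is untyped
)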
+ :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } def __init__( self, **kwargs ): - super(DatasetGZipCompression, self).__init__(**kwargs) - self.type = 'GZip' # type: str - self.level = kwargs.get('level', None) + super(BlobTrigger, self).__init__(**kwargs) + self.type = 'BlobTrigger' # type: str + self.folder_path = kwargs['folder_path'] + self.max_concurrency = kwargs['max_concurrency'] + self.linked_service = kwargs['linked_service'] -class DatasetListResponse(msrest.serialization.Model): - """A list of dataset resources. +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. All required parameters must be populated in order to send to Azure. - :param value: Required. List of datasets. - :type value: list[~azure.synapse.artifacts.models.DatasetResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name for connection. Type: string (or Expression with resultType + string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression with resultType + integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression with resultType + string). + :type username: object + :param password: Password for authentication. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DatasetResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatasetListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - + super(CassandraLinkedService, self).__init__(**kwargs) + self.type = 'Cassandra' # type: str + self.host = kwargs['host'] + self.authentication_type = kwargs.get('authentication_type', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DatasetLocation(msrest.serialization.Model): - """Dataset location. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . +class CassandraSource(TabularSource): + """A copy activity source for a Cassandra database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language + (CQL) command. Type: string (or Expression with resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many Cassandra servers must + respond to a read request before returning data to the client application. Cassandra checks the + specified number of Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", + "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". + :type consistency_level: str or + ~azure.synapse.artifacts.models.CassandraSourceReadConsistencyLevels """ _validation = { @@ -6099,220 +6687,187 @@ class DatasetLocation(msrest.serialization.Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetLocation' # type: str - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - + super(CassandraSource, self).__init__(**kwargs) + self.type = 'CassandraSource' # type: str + self.query = kwargs.get('query', None) + self.consistency_level = kwargs.get('consistency_level', None) -class DatasetReference(msrest.serialization.Model): - """Dataset reference type. + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. All required parameters must be populated in order to send to Azure. - :param type: Required. Dataset reference type. Possible values include: "DatasetReference". - :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name of the Cassandra database. Type: string (or Expression with + resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + resultType string). + :type keyspace: object """ _validation = { 'type': {'required': True}, - 'reference_name': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatasetReference, self).__init__(**kwargs) - self.type = kwargs['type'] - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) + super(CassandraTableDataset, self).__init__(**kwargs) + self.type = 'CassandraTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.keyspace = kwargs.get('keyspace', None) -class DatasetResource(SubResource): - """Dataset resource type. +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. 
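The Cassandra pieces above fit together as linked service, dataset, and copy source; a sketch follows with hypothetical host, user, and artifact names, and with LinkedServiceReference assumed to take the same (type, reference_name) pair as the other *Reference models.

from azure.synapse.artifacts.models import (
    CassandraLinkedService,
    CassandraSource,
    CassandraTableDataset,
    LinkedServiceReference,
)

cassandra_ls = CassandraLinkedService(
    host="cassandra01.contoso.internal",   # hypothetical host; the only required type property
    port=9042,
    username="synapse_reader",
)

cassandra_table = CassandraTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="CassandraLinkedService",   # hypothetical workspace artifact name
    ),
    keyspace="telemetry",
    table_name="events_by_day",
)

cassandra_source = CassandraSource(
    query="SELECT * FROM telemetry.events_by_day",
    consistency_level="LOCAL_QUORUM",
)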
- :type properties: ~azure.synapse.artifacts.models.Dataset - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. - - All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[~azure.synapse.artifacts.models.PipelineReference] + :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. + :type run_dimension: str """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.type = 'ZipDeflate' # type: str - self.level = kwargs.get('level', None) + super(ChainingTrigger, self).__init__(**kwargs) + self.type = 'ChainingTrigger' # type: str + self.pipeline = kwargs['pipeline'] + self.depends_on = kwargs['depends_on'] + self.run_dimension = kwargs['run_dimension'] -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. 
+class CloudError(msrest.serialization.Model): + """The object that defines the structure of an Azure Synapse error response. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. Possible values - include: "Basic". - :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param package_collection: Under where packages are created when querying database. Type: - string (or Expression with resultType string). - :type package_collection: object - :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or - Expression with resultType string). - :type certificate_common_name: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + :param target: Property name/path in request associated with error. + :type target: str + :param details: Array with additional error details. 
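ChainingTrigger above fires its pipeline once every upstream run has emitted the same runDimension value; a sketch follows, with pipeline names hypothetical and the TriggerPipelineReference/PipelineReference shapes assumed from elsewhere in this models package.

from azure.synapse.artifacts.models import (
    ChainingTrigger,
    PipelineReference,
    TriggerPipelineReference,
)

chaining_trigger = ChainingTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(
            type="PipelineReference",
            reference_name="DownstreamPipeline",   # hypothetical pipeline names throughout
        ),
    ),
    depends_on=[
        PipelineReference(type="PipelineReference", reference_name="IngestPipeline"),
        PipelineReference(type="PipelineReference", reference_name="CleansePipeline"),
    ],
    run_dimension="RunDate",
)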
+ :type details: list[~azure.synapse.artifacts.models.CloudError] """ _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, - 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudError]'}, } def __init__( self, **kwargs ): - super(Db2LinkedService, self).__init__(**kwargs) - self.type = 'Db2' # type: str - self.server = kwargs['server'] - self.database = kwargs['database'] - self.authentication_type = kwargs.get('authentication_type', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.package_collection = kwargs.get('package_collection', None) - self.certificate_common_name = kwargs.get('certificate_common_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(CloudError, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) -class Db2TableDataset(Dataset): - """The Db2 table dataset. +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. All required parameters must be populated in order to send to Azure. @@ -6338,14 +6893,9 @@ class Db2TableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with - resultType string). - :type schema_type_properties_schema: object - :param table: The Db2 table name. Type: string (or Expression with resultType string). - :type table: object + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). 
+ :type entity_name: object """ _validation = { @@ -6363,284 +6913,251 @@ class Db2TableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(Db2TableDataset, self).__init__(**kwargs) - self.type = 'Db2Table' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.type = 'CommonDataServiceForAppsEntity' # type: str + self.entity_name = kwargs.get('entity_name', None) -class DeleteActivity(ExecutionActivity): - """Delete activity. +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path will be deleted - recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param max_concurrent_connections: The max concurrent connections to connect data source at the - same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity execution. Default - value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: object - :param log_storage_settings: Log storage settings customer need to provide when enableLogging - is true. - :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. 
+ :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Common Data Service for Apps + instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common + Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType + string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for Apps server. The + property is required for on-prem and not allowed for online. Type: string (or Expression with + resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. The property is + required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression + with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property + is required for on-line and not allowed for on-prem. Type: string (or Expression with + resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service for Apps instance. + The property is required for on-prem and required for online when there are more than one + Common Data Service for Apps instances associated with the user. Type: string (or Expression + with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect to Common Data Service + for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). Possible values include: "Office365", "Ifd", + "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Common Data Service for Apps instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. 
+ :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DeleteActivity, self).__init__(**kwargs) - self.type = 'Delete' # type: str - self.recursive = kwargs.get('recursive', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.enable_logging = kwargs.get('enable_logging', None) - self.log_storage_settings = kwargs.get('log_storage_settings', None) - self.dataset = kwargs['dataset'] + super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) + self.type = 'CommonDataServiceForApps' # type: str + self.deployment_type = kwargs['deployment_type'] + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = 
kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) + self.service_principal_credential = kwargs.get('service_principal_credential', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for deleting data flow debug session. +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). 
+ :type alternate_key_name: object """ + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) + super(CommonDataServiceForAppsSink, self).__init__(**kwargs) + self.type = 'CommonDataServiceForAppsSink' # type: str + self.write_behavior = kwargs['write_behavior'] + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.alternate_key_name = kwargs.get('alternate_key_name', None) -class DelimitedTextDataset(Dataset): - """Delimited text dataset. +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the delimited text storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType - string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object - :param encoding_name: The code page name of the preferred encoding. If miss, the default value - is UTF-8, unless BOM denotes another Unicode encoding. 
Refer to the name column of the table in - the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", - "snappy", "lz4". - :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object - :param first_row_as_header: When used as input, treat the first row of data as headers. When - used as output,write the headers into the output as the first row of data. The default value is - false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data + Service for Apps (online & on-premises). Type: string (or Expression with resultType string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DelimitedTextDataset, self).__init__(**kwargs) - self.type = 'DelimitedText' # type: str - self.location = kwargs.get('location', None) - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression_codec = kwargs.get('compression_codec', None) - self.compression_level = kwargs.get('compression_level', None) - self.quote_char = kwargs.get('quote_char', None) - self.escape_char = kwargs.get('escape_char', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - self.null_value = kwargs.get('null_value', None) + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.type = 'CommonDataServiceForAppsSource' # type: str + self.query = kwargs.get('query', None) -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. Document Database collection name. Type: string (or - Expression with resultType string). - :type collection_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(DocumentDbCollectionDataset, self).__init__(**kwargs) - self.type = 'DocumentDbCollection' # type: str - self.collection_name = kwargs['collection_name'] - - -class DrillLinkedService(LinkedService): - """Drill server linked service. +class ConcurLinkedService(LinkedService): + """Concur Service linked service. All required parameters must be populated in order to send to Azure. @@ -6657,11 +7174,23 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param client_id: Required. Application client_id supplied by Concur App Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur Service. + :type username: object + :param password: The password corresponding to the user name that you provided in the username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6670,6 +7199,8 @@ class DrillLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, } _attribute_map = { @@ -6679,8 +7210,12 @@ class DrillLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -6688,15 +7223,19 @@ def __init__( self, **kwargs ): - super(DrillLinkedService, self).__init__(**kwargs) - self.type = 'Drill' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) + super(ConcurLinkedService, self).__init__(**kwargs) + self.type = 'Concur' # type: str + self.client_id = kwargs['client_id'] + self.username = kwargs['username'] + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DrillTableDataset(Dataset): - """Drill server dataset. +class ConcurObjectDataset(Dataset): + """Concur Service dataset. All required parameters must be populated in order to send to Azure. @@ -6722,14 +7261,8 @@ class DrillTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression - with resultType string). 
- :type schema_type_properties_schema: object """ _validation = { @@ -6748,221 +7281,257 @@ class DrillTableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DrillTableDataset, self).__init__(**kwargs) - self.type = 'DrillTable' # type: str + super(ConcurObjectDataset, self).__init__(**kwargs) + self.type = 'ConcurObject' # type: str self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. +class ConcurSource(TabularSource): + """A copy activity Concur Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData - endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. Mark this field as a - SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key - Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which - your application resides. Retrieve it by hovering the mouse in the top-right corner of the - Azure portal. Type: string (or Expression with resultType string). - :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: - string (or Expression with resultType string). - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DynamicsAXLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsAX' # type: str - self.url = kwargs['url'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.tenant = kwargs['tenant'] - self.aad_resource_id = kwargs['aad_resource_id'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(ConcurSource, self).__init__(**kwargs) + self.type = 'ConcurSource' # type: str + self.query = kwargs.get('query', None) -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression - with resultType string). - :type path: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, } def __init__( self, **kwargs ): - super(DynamicsAXResourceDataset, self).__init__(**kwargs) - self.type = 'DynamicsAXResource' # type: str - self.path = kwargs['path'] + super(ControlActivity, self).__init__(**kwargs) + self.type = 'Container' # type: str -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. +class CopyActivity(ExecutionActivity): + """Copy activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
+ :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param outputs: List of outputs for the activity. + :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param source: Required. Copy activity source. + :type source: ~azure.synapse.artifacts.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.synapse.artifacts.models.CopySink + :param translator: Copy activity translator. If not specified, tabular translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim staging. Default value is + false. Type: boolean (or Expression with resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when EnableStaging is true. + :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + avoid overloading the data store. Type: integer (or Expression with resultType integer), + minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units that can be used to + perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. + Type: boolean (or Expression with resultType boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row settings when + EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. 
+ :type preserve: list[object] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, } def __init__( self, **kwargs ): - super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsCrmEntity' # type: str - self.entity_name = kwargs.get('entity_name', None) + super(CopyActivity, self).__init__(**kwargs) + self.type = 'Copy' # type: str + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.source = kwargs['source'] + self.sink = kwargs['sink'] + self.translator = kwargs.get('translator', None) + self.enable_staging = kwargs.get('enable_staging', None) + self.staging_settings = kwargs.get('staging_settings', None) + self.parallel_copies = kwargs.get('parallel_copies', None) + self.data_integration_units = kwargs.get('data_integration_units', None) + self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) + self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. +class CopyTranslator(msrest.serialization.Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy translator type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__( + self, + **kwargs + ): + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'CopyTranslator' # type: str + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. @@ -6979,52 +7548,17 @@ class DynamicsCrmLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Type: string (or Expression with resultType - string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem - and not allowed for online. Default is 443. Type: integer (or Expression with resultType - integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for - on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM instance. The property is - required for on-prem and required for online when there are more than one Dynamics CRM - instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics CRM instance. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). 
- :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or + Expression with resultType string). + :type account_endpoint: object + :param database: The name of the database. Type: string (or Expression with resultType string). + :type database: object + :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or + AzureKeyVaultSecretReference. + :type account_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -7033,8 +7567,6 @@ class DynamicsCrmLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { @@ -7044,17 +7576,10 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -7062,24 +7587,17 @@ def __init__( self, **kwargs ): - super(DynamicsCrmLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsCrm' # type: str - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) + super(CosmosDbLinkedService, self).__init__(**kwargs) + self.type = 'CosmosDb' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.account_endpoint = kwargs.get('account_endpoint', None) + self.database = kwargs.get('database', None) + self.account_key = kwargs.get('account_key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. All required parameters must be populated in order to send to Azure. @@ -7105,14 +7623,15 @@ class DynamicsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. 
Type: string (or Expression with resultType - string). - :type entity_name: object + :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: + string (or Expression with resultType string). + :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -7125,20 +7644,20 @@ class DynamicsEntityDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DynamicsEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsEntity' # type: str - self.entity_name = kwargs.get('entity_name', None) + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApiCollection' # type: str + self.collection = kwargs['collection'] -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. @@ -7155,59 +7674,19 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for - Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics server. The property is required - for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: str - :param port: The port of on-premises Dynamics server. The property is required for on-prem and - not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), - minimum: 0. - :type port: str - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- - line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: str - :param organization_name: The organization name of the Dynamics instance. The property is - required for on-prem and required for online when there are more than one Dynamics instances - associated with the user. Type: string (or Expression with resultType string). - :type organization_name: str - :param authentication_type: Required. The authentication type to connect to Dynamics server. - 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics instance. 
Type: string (or Expression with - resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to + access. Type: string (or Expression with resultType string). 
+ :type database: object """ _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, } _attribute_map = { @@ -7217,120 +7696,140 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, - 'port': {'key': 'typeProperties.port', 'type': 'str'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DynamicsLinkedService, self).__init__(**kwargs) - self.type = 'Dynamics' # type: str - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApi' # type: str + self.connection_string = kwargs['connection_string'] + self.database = kwargs['database'] -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
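For illustration, a minimal sketch of how the regenerated CosmosDB models above can be constructed. The connection strings and names are placeholders, and the LinkedServiceReference kwargs (type/reference_name) are assumed to match the other generated reference models:

from azure.synapse.artifacts.models import (
    CosmosDbLinkedService,
    CosmosDbMongoDbApiCollectionDataset,
    CosmosDbMongoDbApiLinkedService,
    LinkedServiceReference,
)

# SQL API linked service: connection_string/account_endpoint/database/account_key are optional kwargs.
cosmos_ls = CosmosDbLinkedService(
    connection_string="AccountEndpoint=https://myaccount.documents.azure.com:443/;AccountKey=<key>;",
    database="mydb",
)

# MongoDB API linked service: connection_string and database are required kwargs.
mongo_ls = CosmosDbMongoDbApiLinkedService(
    connection_string="mongodb://myaccount:<key>@myaccount.mongo.cosmos.azure.com:10255/?ssl=true",
    database="mydb",
)

# Collection dataset: linked_service_name and collection are required.
mongo_ds = CosmosDbMongoDbApiCollectionDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyCosmosMongoLinkedService"
    ),
    collection="orders",
)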
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua account in the form: - sitename/username. (i.e. Eloqua/Alice). - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Specifies whether the document with same key to be overwritten (upsert) + rather than throw exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). Type: string (or Expression with resultType string). 
+ :type write_behavior: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(EloquaLinkedService, self).__init__(**kwargs) - self.type = 'Eloqua' # type: str - self.endpoint = kwargs['endpoint'] - self.username = kwargs['username'] - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApiSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). 
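A short sketch of the new MongoDB API sink; per the docstring above, write_behavior takes "insert" (the default) or "upsert", and the remaining values here are illustrative:

from azure.synapse.artifacts.models import CosmosDbMongoDbApiSink

# Upsert documents that share a key instead of raising on insert.
mongo_sink = CosmosDbMongoDbApiSink(
    write_behavior="upsert",
    write_batch_size=1000,
    write_batch_timeout="00:05:00",
)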
+ :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. This property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApiSource' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) + + +class CosmosDbSqlApiCollectionDataset(Dataset): + """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. @@ -7356,13 +7855,15 @@ class EloquaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + Expression with resultType string). + :type collection_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, } _attribute_map = { @@ -7375,449 +7876,376 @@ class EloquaObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(EloquaObjectDataset, self).__init__(**kwargs) - self.type = 'EloquaObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request. 
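The matching source, as a hedged sketch: filter is a query document expressed as a string, and the limit kwarg on MongoDbCursorMethodsProperties (defined elsewhere in this module) is an assumption for illustration:

from azure.synapse.artifacts.models import (
    CosmosDbMongoDbApiSource,
    MongoDbCursorMethodsProperties,
)

mongo_source = CosmosDbMongoDbApiSource(
    filter='{"status": "shipped"}',  # omit or pass "{}" to return all documents
    cursor_methods=MongoDbCursorMethodsProperties(limit=500),  # assumed kwarg
    batch_size=100,
    query_timeout="00:10:00",
)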
- :type row_limits: int - :param expression: The expression for preview. - :type expression: str - """ - - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, - 'expression': {'key': 'expression', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.stream_name = kwargs.get('stream_name', None) - self.row_limits = kwargs.get('row_limits', None) - self.expression = kwargs.get('expression', None) + super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiCollection' # type: str + self.collection_name = kwargs['collection_name'] -class ExecuteDataFlowActivity(ExecutionActivity): - """Execute data flow activity. +class CosmosDbSqlApiSink(CopySink): + """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
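The SQL API collection dataset follows the same pattern as the MongoDB API dataset sketched earlier; collection_name is the extra required kwarg (same LinkedServiceReference assumption, placeholder names):

from azure.synapse.artifacts.models import (
    CosmosDbSqlApiCollectionDataset,
    LinkedServiceReference,
)

sql_api_ds = CosmosDbSqlApiCollectionDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyCosmosSqlLinkedService"
    ),
    collection_name="orders",
)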
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'data_flow': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecuteDataFlowActivity, self).__init__(**kwargs) - self.type = 'ExecuteDataFlow' # type: str - self.data_flow = kwargs['data_flow'] - self.staging = kwargs.get('staging', None) - self.integration_runtime = kwargs.get('integration_runtime', None) - self.compute = kwargs.get('compute', None) + super(CosmosDbSqlApiSink, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) -class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): - """Compute properties for data flow activity. +class CosmosDbSqlApiSource(CopySource): + """A copy activity Azure CosmosDB (SQL API) Collection source. - :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType - :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
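A minimal sketch of the SQL API sink; the docstring above allows write_behavior values of insert and upsert:

from azure.synapse.artifacts.models import CosmosDbSqlApiSink

sql_api_sink = CosmosDbSqlApiSink(
    write_behavior="insert",
    max_concurrent_connections=4,
)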
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: SQL API query. Type: string (or Expression with resultType string). + :type query: object + :param page_size: Page size of the result. Type: integer (or Expression with resultType + integer). + :type page_size: object + :param preferred_regions: Preferred regions. Type: array of strings (or Expression with + resultType array of strings). + :type preferred_regions: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'page_size': {'key': 'pageSize', 'type': 'object'}, + 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) + super(CosmosDbSqlApiSource, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiSource' # type: str + self.query = kwargs.get('query', None) + self.page_size = kwargs.get('page_size', None) + self.preferred_regions = kwargs.get('preferred_regions', None) -class ExecutePipelineActivity(Activity): - """Execute pipeline activity. +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.synapse.artifacts.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait for the dependent - pipeline execution to finish. Default is false. - :type wait_on_completion: bool + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. 
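And the SQL API source, with an illustrative query and region list:

from azure.synapse.artifacts.models import CosmosDbSqlApiSource

sql_api_source = CosmosDbSqlApiSource(
    query="SELECT * FROM c WHERE c.status = 'shipped'",
    page_size=1000,
    preferred_regions=["West US 2", "East US"],
)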
+ :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in connection string. + :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'pipeline': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'ExecutePipeline' # type: str - self.pipeline = kwargs['pipeline'] - self.parameters = kwargs.get('parameters', None) - self.wait_on_completion = kwargs.get('wait_on_completion', None) + super(CouchbaseLinkedService, self).__init__(**kwargs) + self.type = 'Couchbase' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.cred_string = kwargs.get('cred_string', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. +class CouchbaseSource(TabularSource): + """A copy activity Couchbase server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. 
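A minimal sketch of the Couchbase linked service; the ODBC connection string is a placeholder, and a Key Vault-backed credString would use the AzureKeyVaultSecretReference model defined elsewhere in this module:

from azure.synapse.artifacts.models import CouchbaseLinkedService

couchbase_ls = CouchbaseLinkedService(
    connection_string="Server=couchbase.example.com;Port=8093;",  # placeholder ODBC string
)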
- :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or - "x64". Type: string (or Expression with resultType string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: string (or Expression - with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. Type: string (or - Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the SSIS package. - :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers to execute the SSIS - package. - :type project_connection_managers: dict[str, object] - :param package_connection_managers: The package level connection managers to execute the SSIS - package. - :type package_connection_managers: dict[str, object] - :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecuteSSISPackageActivity, self).__init__(**kwargs) - self.type = 'ExecuteSSISPackage' # type: str - self.package_location = kwargs['package_location'] - self.runtime = kwargs.get('runtime', None) - self.logging_level = kwargs.get('logging_level', None) - self.environment_path = kwargs.get('environment_path', None) - self.execution_credential = kwargs.get('execution_credential', None) - self.connect_via = kwargs['connect_via'] - self.project_parameters = kwargs.get('project_parameters', None) - self.package_parameters = kwargs.get('package_parameters', None) - self.project_connection_managers = kwargs.get('project_connection_managers', None) - self.package_connection_managers = kwargs.get('package_connection_managers', None) - self.property_overrides = kwargs.get('property_overrides', None) - self.log_location = kwargs.get('log_location', None) - + super(CouchbaseSource, self).__init__(**kwargs) + self.type = 'CouchbaseSource' # type: str + self.query = kwargs.get('query', None) -class ExposureControlRequest(msrest.serialization.Model): - """The exposure control request. - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = kwargs.get('feature_name', None) - self.feature_type = kwargs.get('feature_type', None) - + super(CouchbaseTableDataset, self).__init__(**kwargs) + self.type = 'CouchbaseTable' # type: str + self.table_name = kwargs.get('table_name', None) -class ExposureControlResponse(msrest.serialization.Model): - """The exposure control response. - Variables are only populated by the server, and will be ignored when sending a request. +class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): + """Request body structure for creating data flow debug session. - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param existing_cluster_id: The ID of existing Databricks cluster. + :type existing_cluster_id: str + :param cluster_timeout: Timeout setting for Databricks cluster. + :type cluster_timeout: int + :param new_cluster_name: The name of new Databricks cluster. + :type new_cluster_name: str + :param new_cluster_node_type: The type of new Databricks cluster. 
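A combined sketch of the Couchbase dataset and source defined above (same LinkedServiceReference assumption; the N1QL query is illustrative):

from azure.synapse.artifacts.models import (
    CouchbaseSource,
    CouchbaseTableDataset,
    LinkedServiceReference,
)

couchbase_ds = CouchbaseTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyCouchbaseLinkedService"
    ),
    table_name="travel-sample",
)

couchbase_source = CouchbaseSource(
    query="SELECT name, country FROM `travel-sample` WHERE type = 'airline'",
    query_timeout="00:10:00",
)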
+ :type new_cluster_node_type: str + :param data_bricks_linked_service: Data bricks linked service. + :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource """ - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, + 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, + 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, + 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, + 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, } def __init__( self, **kwargs ): - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None - + super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.cluster_timeout = kwargs.get('cluster_timeout', None) + self.new_cluster_name = kwargs.get('new_cluster_name', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.data_bricks_linked_service = kwargs.get('data_bricks_linked_service', None) -class Expression(msrest.serialization.Model): - """Azure Synapse expression definition. - All required parameters must be populated in order to send to Azure. +class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): + """Response body structure for creating data flow debug session. - :param type: Required. Expression type. Possible values include: "Expression". - :type type: str or ~azure.synapse.artifacts.models.ExpressionType - :param value: Required. Expression value. - :type value: str + :param session_id: The ID of data flow debug session. + :type session_id: str """ - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Expression, self).__init__(**kwargs) - self.type = kwargs['type'] - self.value = kwargs['value'] + super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) -class FileServerLinkedService(LinkedService): - """File system linked service. +class CreateRunResponse(msrest.serialization.Model): + """Response body with a run identifier. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. 
Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param run_id: Required. Identifier of a run. + :type run_id: str """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + 'run_id': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__( self, **kwargs ): - super(FileServerLinkedService, self).__init__(**kwargs) - self.type = 'FileServer' # type: str - self.host = kwargs['host'] - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = kwargs['run_id'] -class FilterActivity(Activity): - """Filter and return results from input array based on the conditions. +class CustomActivity(ExecutionActivity): + """Custom activity type. All required parameters must be populated in order to send to Azure. @@ -7834,17 +8262,33 @@ class FilterActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.synapse.artifacts.models.Expression - :param condition: Required. Condition to be used for filtering the input. - :type condition: ~azure.synapse.artifacts.models.Expression + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param command: Required. Command for custom activity Type: string (or Expression with + resultType string). + :type command: object + :param resource_linked_service: Resource linked service reference. + :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for resource files Type: string (or Expression with resultType + string). + :type folder_path: object + :param reference_objects: Reference objects. + :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject + :param extended_properties: User defined property bag. 
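For the data flow debug session payloads, a hedged sketch of the request model; the cluster values are illustrative, and the response models (session_id, run_id) are populated by the service rather than built by callers:

from azure.synapse.artifacts.models import CreateDataFlowDebugSessionRequest

debug_request = CreateDataFlowDebugSessionRequest(
    data_flow_name="MyDataFlow",
    cluster_timeout=60,                     # Databricks cluster timeout
    new_cluster_name="debug-cluster",
    new_cluster_node_type="Standard_DS3_v2",
)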
There is no restriction on the keys or + values that can be used. The user specified custom activity has the full responsibility to + consume and interpret the content defined. + :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted for custom activity. + Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, + 'command': {'required': True}, } _attribute_map = { @@ -7854,84 +8298,113 @@ class FilterActivity(Activity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } def __init__( self, **kwargs ): - super(FilterActivity, self).__init__(**kwargs) - self.type = 'Filter' # type: str - self.items = kwargs['items'] - self.condition = kwargs['condition'] + super(CustomActivity, self).__init__(**kwargs) + self.type = 'Custom' # type: str + self.command = kwargs['command'] + self.resource_linked_service = kwargs.get('resource_linked_service', None) + self.folder_path = kwargs.get('folder_path', None) + self.reference_objects = kwargs.get('reference_objects', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.retention_time_in_days = kwargs.get('retention_time_in_days', None) -class ForEachActivity(Activity): - """This activity is used for iterating over a collection and execute given activities. +class CustomActivityReferenceObject(msrest.serialization.Model): + """Reference objects for custom activity. - All required parameters must be populated in order to send to Azure. + :param linked_services: Linked service references. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param datasets: Dataset references. + :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] + """ - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. 
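A sketch of the regenerated CustomActivity; name and command are the required kwargs and the remaining type properties are optional (values are placeholders):

from azure.synapse.artifacts.models import CustomActivity

custom_activity = CustomActivity(
    name="RunCustomExe",
    command="cmd /c echo hello",
    folder_path="customactivity/binaries",
    extended_properties={"environment": "dev"},
    retention_time_in_days=30,
)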
- :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of parallel execution - (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.synapse.artifacts.models.Expression - :param activities: Required. List of activities to execute . - :type activities: list[~azure.synapse.artifacts.models.Activity] + _attribute_map = { + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + } + + def __init__( + self, + **kwargs + ): + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = kwargs.get('linked_services', None) + self.datasets = kwargs.get('datasets', None) + + +class CustomDataset(Dataset): + """The custom dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param type_properties: Custom dataset properties. 
+ :type type_properties: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ForEachActivity, self).__init__(**kwargs) - self.type = 'ForEach' # type: str - self.is_sequential = kwargs.get('is_sequential', None) - self.batch_count = kwargs.get('batch_count', None) - self.items = kwargs['items'] - self.activities = kwargs['activities'] + super(CustomDataset, self).__init__(**kwargs) + self.type = 'CustomDataset' # type: str + self.type_properties = kwargs.get('type_properties', None) -class FtpServerLinkedService(LinkedService): - """A FTP server Linked Service. +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. All required parameters must be populated in order to send to Azure. @@ -7948,36 +8421,13 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the FTP server uses to listen for client connections. - Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to logon the FTP server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is - true. Type: boolean (or Expression with resultType boolean). 
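CustomDataset passes an opaque typeProperties bag straight through; a minimal sketch with placeholder properties (same LinkedServiceReference assumption as earlier):

from azure.synapse.artifacts.models import CustomDataset, LinkedServiceReference

custom_ds = CustomDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyCustomLinkedService"
    ),
    type_properties={"path": "container/folder", "format": "custom"},
)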
- :type enable_ssl: object - :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate - when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_server_certificate_validation: object + :param type_properties: Required. Custom linked service properties. + :type type_properties: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'type_properties': {'required': True}, } _attribute_map = { @@ -7987,34 +8437,52 @@ class FtpServerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__( self, **kwargs ): - super(FtpServerLinkedService, self).__init__(**kwargs) - self.type = 'FtpServer' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + super(CustomDataSourceLinkedService, self).__init__(**kwargs) + self.type = 'CustomDataSource' # type: str + self.type_properties = kwargs['type_properties'] -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: . + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {} + } + + def __init__( + self, + **kwargs + ): + super(CustomSetupBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. All required parameters must be populated in order to send to Azure. @@ -8035,16 +8503,21 @@ class GetMetadataActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. 
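Likewise for the custom linked service, where type_properties is the only required kwarg and is passed through untouched (placeholder values):

from azure.synapse.artifacts.models import CustomDataSourceLinkedService

custom_ls = CustomDataSourceLinkedService(
    type_properties={"endpoint": "https://example.com/api", "apiVersion": "2020-06-01"},
)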
- :type dataset: ~azure.synapse.artifacts.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] + :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks + Workspace. This path must begin with a slash. Type: string (or Expression with resultType + string). + :type notebook_path: object + :param base_parameters: Base parameters to be used for each run of this job.If the notebook + takes a parameter that is not specified, the default value from the notebook will be used. + :type base_parameters: dict[str, object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'dataset': {'required': True}, + 'notebook_path': {'required': True}, } _attribute_map = { @@ -8056,1031 +8529,8826 @@ class GetMetadataActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, + 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, **kwargs ): - super(GetMetadataActivity, self).__init__(**kwargs) - self.type = 'GetMetadata' # type: str - self.dataset = kwargs['dataset'] - self.field_list = kwargs.get('field_list', None) + super(DatabricksNotebookActivity, self).__init__(**kwargs) + self.type = 'DatabricksNotebook' # type: str + self.notebook_path = kwargs['notebook_path'] + self.base_parameters = kwargs.get('base_parameters', None) + self.libraries = kwargs.get('libraries', None) -class GetSsisObjectMetadataRequest(msrest.serialization.Model): - """The request payload of get SSIS object metadata. +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. - :param metadata_path: Metadata path. - :type metadata_path: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing the main method to be + executed. This class must be contained in a JAR provided as a library. Type: string (or + Expression with resultType string). 
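For the DatabricksNotebook activity defined above, name and notebook_path are the only required inputs besides the server-filled type constant; base_parameters and libraries are optional. A minimal sketch with placeholder values (the library spec mirrors the Databricks format but is not defined by this patch):

from azure.synapse.artifacts.models import DatabricksNotebookActivity

activity = DatabricksNotebookActivity(
    name="RunDailyNotebook",
    notebook_path="/Shared/reports/daily",        # must begin with a slash
    base_parameters={"run_date": "2020-09-01"},   # notebook defaults apply for anything omitted
    libraries=[{"pypi": {"package": "requests"}}],
)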
+ :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, **kwargs ): - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = kwargs.get('metadata_path', None) + super(DatabricksSparkJarActivity, self).__init__(**kwargs) + self.type = 'DatabricksSparkJar' # type: str + self.main_class_name = kwargs['main_class_name'] + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) -class GoogleAdWordsLinkedService(LinkedService): - """Google AdWords service linked service. +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_customer_id: Required. The Client customer ID of the AdWords account that you - want to fetch report data for. - :type client_customer_id: object - :param developer_token: Required. The developer token associated with the manager account that - you use to grant access to the AdWords API. - :type developer_token: ~azure.synapse.artifacts.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". 
- :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. DBFS paths are + supported. Type: string (or Expression with resultType string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
+ :type libraries: list[dict[str, object]] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'client_customer_id': {'required': True}, - 'developer_token': {'required': True}, - 'authentication_type': {'required': True}, + 'python_file': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, **kwargs ): - super(GoogleAdWordsLinkedService, self).__init__(**kwargs) - self.type = 'GoogleAdWords' # type: str - self.client_customer_id = kwargs['client_customer_id'] - self.developer_token = kwargs['developer_token'] - self.authentication_type = kwargs['authentication_type'] - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(DatabricksSparkPythonActivity, self).__init__(**kwargs) + self.type = 'DatabricksSparkPython' # type: str + self.python_file = kwargs['python_file'] + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. +class DataFlow(msrest.serialization.Model): + """Azure Synapse nested object which contains a flow with data movements and transformations. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MappingDataFlow. 
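DataFlow is a polymorphic base: its _subtype_map routes deserialization to the concrete MappingDataFlow class based on the type discriminator. A sketch of that dispatch, assuming the Model.deserialize classmethod the generated models inherit from msrest:

from azure.synapse.artifacts.models import DataFlow

raw = {
    "type": "MappingDataFlow",   # discriminator consulted via DataFlow._subtype_map
    "description": "Joins orders with customers",
    "typeProperties": {"sources": [], "sinks": [], "transformations": []},
}
flow = DataFlow.deserialize(raw)
# type(flow).__name__ == "MappingDataFlow", resolved from the subtype map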
All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of data flow.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: The description of the data flow. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the data flow. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. + :type folder: ~azure.synapse.artifacts.models.DataFlowFolder """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + } + + _subtype_map = { + 'type': {'MappingDataFlow': 'MappingDataFlow'} } def __init__( self, **kwargs ): - super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleAdWordsObject' # type: str - self.table_name = kwargs.get('table_name', None) + super(DataFlow, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.description = kwargs.get('description', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. +class DataFlowDebugCommandRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google - Drive access enables support for federated tables that combine BigQuery data with data from - Google Drive. The default value is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param session_id: Required. The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param command_name: The command name. + :type command_name: str + :param command_payload: Required. The command payload object. 
+ :type command_payload: object """ _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, + 'session_id': {'required': True}, + 'command_payload': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'command_name': {'key': 'commandName', 'type': 'str'}, + 'command_payload': {'key': 'commandPayload', 'type': 'object'}, } def __init__( self, **kwargs ): - super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.type = 'GoogleBigQuery' # type: str - self.project = kwargs['project'] - self.additional_projects = kwargs.get('additional_projects', None) - self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) - self.authentication_type = kwargs['authentication_type'] - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - + super(DataFlowDebugCommandRequest, self).__init__(**kwargs) + self.session_id = kwargs['session_id'] + self.data_flow_name = kwargs.get('data_flow_name', None) + self.command_name = kwargs.get('command_name', None) + self.command_payload = kwargs['command_payload'] -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugCommandResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. 
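DataFlowDebugCommandRequest requires only the debug session ID and an opaque command payload; data_flow_name and command_name are optional. A sketch with placeholder values (the command name and payload shape are illustrative, not defined by this patch):

from azure.synapse.artifacts.models import DataFlowDebugCommandRequest

request = DataFlowDebugCommandRequest(
    session_id="00000000-0000-0000-0000-000000000000",  # ID returned when the debug session was created
    data_flow_name="MyMappingDataFlow",
    command_name="executePreviewQuery",                 # illustrative command name
    command_payload={"streamName": "source1", "rowLimits": 100},
)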
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using database + table - properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type dataset: object + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. + :type data: str """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, } def __init__( self, **kwargs ): - super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleBigQueryObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.dataset = kwargs.get('dataset', None) - + super(DataFlowDebugCommandResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.data = kwargs.get('data', None) -class GoogleCloudStorageLinkedService(LinkedService): - """Linked service for Google Cloud Storage. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugPackage(msrest.serialization.Model): + """Request body structure for starting data flow debug session. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access - Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access - Management (IAM) user. - :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Google Cloud Storage - Connector. This is an optional property; change it only if you want to try a different service - endpoint or want to switch between https and http. Type: string (or Expression with resultType - string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource + :param datasets: List of datasets. + :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] + :param linked_services: List of linked services. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] + :param staging: Staging info for debug session. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. 
+ :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } def __init__( self, **kwargs ): - super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) - self.type = 'GoogleCloudStorage' # type: str - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - + super(DataFlowDebugPackage, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.session_id = kwargs.get('session_id', None) + self.data_flow = kwargs.get('data_flow', None) + self.datasets = kwargs.get('datasets', None) + self.linked_services = kwargs.get('linked_services', None) + self.staging = kwargs.get('staging', None) + self.debug_settings = kwargs.get('debug_settings', None) -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. 
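The debug-session models above compose into a single DataFlowDebugPackage sent when a debug session is started. A sketch of wiring one together, assuming MappingDataFlow (the concrete subtype named earlier) and using placeholder names:

from azure.synapse.artifacts.models import (
    DataFlowDebugPackage,
    DataFlowDebugPackageDebugSettings,
    DataFlowDebugResource,
    MappingDataFlow,
)

package = DataFlowDebugPackage(
    session_id="00000000-0000-0000-0000-000000000000",   # placeholder session ID
    data_flow=DataFlowDebugResource(
        name="MyMappingDataFlow",                        # DataFlowDebugResource requires properties
        properties=MappingDataFlow(description="debug copy of the flow"),
    ),
    debug_settings=DataFlowDebugPackageDebugSettings(
        parameters={"window": "PT1H"},                   # data flow parameters, passed through untyped
    ),
)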
Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object """ - _validation = { - 'type': {'required': True}, + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } + def __init__( + self, + **kwargs + ): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) + self.source_settings = kwargs.get('source_settings', None) + self.parameters = kwargs.get('parameters', None) + self.dataset_parameters = kwargs.get('dataset_parameters', None) + + +class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): + """Request body structure for data flow preview data. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. + :type row_limits: int + """ + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, } def __init__( self, **kwargs ): - super(GreenplumLinkedService, self).__init__(**kwargs) - self.type = 'Greenplum' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.stream_name = kwargs.get('stream_name', None) + self.row_limits = kwargs.get('row_limits', None) -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. +class DataFlowDebugQueryResponse(msrest.serialization.Model): + """Response body structure of data flow query for data preview, statistics or expression preview. - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } + :param run_id: The run ID of data flow debug session. + :type run_id: str + """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__( self, **kwargs ): - super(GreenplumTableDataset, self).__init__(**kwargs) - self.type = 'GreenplumTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - + super(DataFlowDebugQueryResponse, self).__init__(**kwargs) + self.run_id = kwargs.get('run_id', None) -class HBaseLinkedService(LinkedService): - """HBase server linked service. - All required parameters must be populated in order to send to Azure. +class SubResourceDebugResource(msrest.serialization.Model): + """Azure Synapse nested debug resource. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: object - :param port: The TCP port that the HBase instance uses to listen for client connections. The - default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version). - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use to connect to the - HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param name: The resource name. 
+ :type name: str """ - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, **kwargs ): - super(HBaseLinkedService, self).__init__(**kwargs) - self.type = 'HBase' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SubResourceDebugResource, self).__init__(**kwargs) + self.name = kwargs.get('name', None) -class HBaseObjectDataset(Dataset): - """HBase server dataset. +class DataFlowDebugResource(SubResourceDebugResource): + """Data flow debug resource. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param name: The resource name. + :type name: str + :param properties: Required. Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, } def __init__( self, **kwargs ): - super(HBaseObjectDataset, self).__init__(**kwargs) - self.type = 'HBaseObject' # type: str - self.table_name = kwargs.get('table_name', None) - + super(DataFlowDebugResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugResultResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the HDFS. Possible values - are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Windows authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. + :type data: str """ - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, } def __init__( self, **kwargs ): - super(HdfsLinkedService, self).__init__(**kwargs) - self.type = 'Hdfs' # type: str - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - + super(DataFlowDebugResultResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.data = kwargs.get('data', None) -class HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugSessionInfo(msrest.serialization.Model): + """Data flow debug session info. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". 
- :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster - is with ESP (Enterprise Security Package). - :type query_timeout: int + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param compute_type: Compute type of the cluster. + :type compute_type: str + :param core_count: Core count of the cluster. + :type core_count: int + :param node_count: Node count of the cluster. (deprecated property). + :type node_count: int + :param integration_runtime_name: Attached integration runtime name of data flow debug session. + :type integration_runtime_name: str + :param session_id: The ID of data flow debug session. + :type session_id: str + :param start_time: Start time of data flow debug session. + :type start_time: str + :param time_to_live_in_minutes: Compute type of the cluster. + :type time_to_live_in_minutes: int + :param last_activity_time: Last activity time of data flow debug session. + :type last_activity_time: str """ - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'node_count': {'key': 'nodeCount', 'type': 'int'}, + 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, + 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, } + def __init__( + self, + **kwargs + ): + super(DataFlowDebugSessionInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.compute_type = kwargs.get('compute_type', None) + self.core_count = kwargs.get('core_count', None) + self.node_count = kwargs.get('node_count', None) + self.integration_runtime_name = kwargs.get('integration_runtime_name', None) + self.session_id = kwargs.get('session_id', None) + self.start_time = kwargs.get('start_time', None) + self.time_to_live_in_minutes = kwargs.get('time_to_live_in_minutes', None) + self.last_activity_time = kwargs.get('last_activity_time', None) + + +class DataFlowDebugStatisticsRequest(msrest.serialization.Model): + """Request body structure for data flow statistics. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param columns: List of column names. 
+ :type columns: list[str] + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'columns': {'key': 'columns', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.stream_name = kwargs.get('stream_name', None) + self.columns = kwargs.get('columns', None) + + +class DataFlowFolder(msrest.serialization.Model): + """The folder that this data flow is in. If not specified, Data flow will appear at the root level. + + :param name: The name of the folder that this data flow is in. + :type name: str + """ + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, } def __init__( self, **kwargs ): - super(HDInsightHiveActivity, self).__init__(**kwargs) - self.type = 'HDInsightHive' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - self.variables = kwargs.get('variables', None) - self.query_timeout = kwargs.get('query_timeout', None) + super(DataFlowFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) -class HDInsightLinkedService(LinkedService): - """HDInsight linked service. +class DataFlowListResponse(msrest.serialization.Model): + """A list of data flow resources. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to - the HCatalog database. - :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security - Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. - Type: string (or Expression with resultType string). - :type file_system: object + :param value: Required. List of data flows. + :type value: list[~azure.synapse.artifacts.models.DataFlowResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str """ _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + 'value': {'key': 'value', 'type': '[DataFlowResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): - super(HDInsightLinkedService, self).__init__(**kwargs) - self.type = 'HDInsight' # type: str - self.cluster_uri = kwargs['cluster_uri'] - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.encrypted_credential = 
kwargs.get('encrypted_credential', None) - self.is_esp_enabled = kwargs.get('is_esp_enabled', None) - self.file_system = kwargs.get('file_system', None) + super(DataFlowListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. +class DataFlowReference(msrest.serialization.Model): + """Data flow reference type. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, object] + :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". + :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType + :param reference_name: Required. Reference data flow name. + :type reference_name: str + :param dataset_parameters: Reference data flow parameters from dataset. 
+ :type dataset_parameters: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, + 'reference_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowReference, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.dataset_parameters = kwargs.get('dataset_parameters', None) + + +class DataFlowResource(AzureEntityResource): + """Data flow resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class Transformation(msrest.serialization.Model): + """A data flow transformation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. 
+ :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } def __init__( self, **kwargs ): - super(HDInsightMapReduceActivity, self).__init__(**kwargs) - self.type = 'HDInsightMapReduce' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.class_name = kwargs['class_name'] - self.jar_file_path = kwargs['jar_file_path'] - self.jar_linked_service = kwargs.get('jar_linked_service', None) - self.jar_libs = kwargs.get('jar_libs', None) - self.defines = kwargs.get('defines', None) + super(Transformation, self).__init__(**kwargs) + self.name = kwargs['name'] + self.description = kwargs.get('description', None) + + +class DataFlowSink(Transformation): + """Transformation for data flow sink. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowSink, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) + + +class DataFlowSource(Transformation): + """Transformation for data flow source. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowSource, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) + + +class DataFlowSourceSetting(msrest.serialization.Model): + """Definition of data flow source setting for debug. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param source_name: The data flow source name. + :type source_name: str + :param row_limit: Defines the row limit of data flow source in debug. + :type row_limit: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_name': {'key': 'sourceName', 'type': 'str'}, + 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowSourceSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_name = kwargs.get('source_name', None) + self.row_limit = kwargs.get('row_limit', None) + + +class DataFlowStagingInfo(msrest.serialization.Model): + """Staging info for execute data flow activity. + + :param linked_service: Staging linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for staging blob. + :type folder_path: str + """ + + _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = kwargs.get('linked_service', None) + self.folder_path = kwargs.get('folder_path', None) + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. + Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Required. Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. + Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. + :type degree_of_parallelism: object + :param priority: Determines which jobs out of all that are queued should be selected to run + first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or + Expression with resultType integer), minimum: 1. + :type priority: object + :param parameters: Parameters for U-SQL job request. + :type parameters: dict[str, object] + :param runtime_version: Runtime version of the U-SQL engine to use. 
Type: string (or Expression + with resultType string). + :type runtime_version: object + :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, + Full and SingleBox. Type: string (or Expression with resultType string). + :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) + self.type = 'DataLakeAnalyticsU-SQL' # type: str + self.script_path = kwargs['script_path'] + self.script_linked_service = kwargs['script_linked_service'] + self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) + self.priority = kwargs.get('priority', None) + self.parameters = kwargs.get('parameters', None) + self.runtime_version = kwargs.get('runtime_version', None) + self.compilation_mode = kwargs.get('compilation_mode', None) + + +class DataLakeStorageAccountDetails(msrest.serialization.Model): + """Details of the data lake storage account associated with the workspace. + + :param account_url: Account URL. + :type account_url: str + :param filesystem: Filesystem name. + :type filesystem: str + """ + + _attribute_map = { + 'account_url': {'key': 'accountUrl', 'type': 'str'}, + 'filesystem': {'key': 'filesystem', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeStorageAccountDetails, self).__init__(**kwargs) + self.account_url = kwargs.get('account_url', None) + self.filesystem = kwargs.get('filesystem', None) + + +class DatasetCompression(msrest.serialization.Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + } + + def __init__( + self, + **kwargs + ): + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'DatasetCompression' # type: str + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetBZip2Compression, self).__init__(**kwargs) + self.type = 'BZip2' # type: str + + +class DatasetDataElement(msrest.serialization.Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetDataElement, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class DatasetDebugResource(SubResourceDebugResource): + """Dataset debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetDebugResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". 
+ :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetDeflateCompression, self).__init__(**kwargs) + self.type = 'Deflate' # type: str + self.level = kwargs.get('level', None) + + +class DatasetFolder(msrest.serialization.Model): + """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetGZipCompression, self).__init__(**kwargs) + self.type = 'GZip' # type: str + self.level = kwargs.get('level', None) + + +class DatasetListResponse(msrest.serialization.Model): + """A list of dataset resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of datasets. + :type value: list[~azure.synapse.artifacts.models.DatasetResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DatasetResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class DatasetReference(msrest.serialization.Model): + """Dataset reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Dataset reference type. Possible values include: "DatasetReference". + :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. 
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class DatasetResource(AzureEntityResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class DatasetSchemaDataElement(msrest.serialization.Model): + """Columns that define the physical type schema of the dataset. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Name of the schema column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the schema column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetSchemaDataElement, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". 
+ :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatasetZipDeflateCompression, self).__init__(**kwargs)
+ self.type = 'ZipDeflate' # type: str
+ self.level = kwargs.get('level', None)
+
+
+class Db2LinkedService(LinkedService):
+ """Linked service for DB2 data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of linked service.Constant filled by server.
+ :type type: str
+ :param connect_via: The integration runtime reference.
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the linked service.
+ :type annotations: list[object]
+ :param server: Required. Server name for connection. Type: string (or Expression with
+ resultType string).
+ :type server: object
+ :param database: Required. Database name for connection. Type: string (or Expression with
+ resultType string).
+ :type database: object
+ :param authentication_type: AuthenticationType to be used for connection. Possible values
+ include: "Basic".
+ :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType
+ :param username: Username for authentication. Type: string (or Expression with resultType
+ string).
+ :type username: object
+ :param password: Password for authentication.
+ :type password: ~azure.synapse.artifacts.models.SecretBase
+ :param package_collection: The collection under which packages are created when querying the
+ database. Type: string (or Expression with resultType string).
+ :type package_collection: object
+ :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or
+ Expression with resultType string).
+ :type certificate_common_name: object
+ :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+ encrypted using the integration runtime credential manager. Type: string (or Expression with
+ resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, + 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(Db2LinkedService, self).__init__(**kwargs) + self.type = 'Db2' # type: str + self.server = kwargs['server'] + self.database = kwargs['database'] + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.package_collection = kwargs.get('package_collection', None) + self.certificate_common_name = kwargs.get('certificate_common_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class Db2Source(TabularSource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(Db2Source, self).__init__(**kwargs) + self.type = 'Db2Source' # type: str + self.query = kwargs.get('query', None) + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + resultType string). + :type schema_type_properties_schema: object + :param table: The Db2 table name. Type: string (or Expression with resultType string). 
+ :type table: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'linked_service_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Db2TableDataset, self).__init__(**kwargs)
+ self.type = 'Db2Table' # type: str
+ self.table_name = kwargs.get('table_name', None)
+ self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None)
+ self.table = kwargs.get('table', None)
+
+
+class DeleteActivity(ExecutionActivity):
+ """Delete activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param type: Required. Type of activity.Constant filled by server.
+ :type type: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy
+ :param recursive: If true, files or sub-folders under the current folder path will be deleted
+ recursively. Default is false. Type: boolean (or Expression with resultType boolean).
+ :type recursive: object
+ :param max_concurrent_connections: The maximum number of concurrent connections to the data
+ source at the same time.
+ :type max_concurrent_connections: int
+ :param enable_logging: Whether to record detailed logs of delete-activity execution. Default
+ value is false. Type: boolean (or Expression with resultType boolean).
+ :type enable_logging: object
+ :param log_storage_settings: Log storage settings the customer needs to provide when
+ enableLogging is true.
+ :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings
+ :param dataset: Required. Delete activity dataset reference.
+ :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'max_concurrent_connections': {'minimum': 1}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, + 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + **kwargs + ): + super(DeleteActivity, self).__init__(**kwargs) + self.type = 'Delete' # type: str + self.recursive = kwargs.get('recursive', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.enable_logging = kwargs.get('enable_logging', None) + self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.dataset = kwargs['dataset'] + + +class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): + """Request body structure for deleting data flow debug session. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+ root level.
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+ :param location: The location of the delimited text storage.
+ :type location: ~azure.synapse.artifacts.models.DatasetLocation
+ :param column_delimiter: The column delimiter. Type: string (or Expression with resultType
+ string).
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string).
+ :type row_delimiter: object
+ :param encoding_name: The code page name of the preferred encoding. If not specified, the
+ default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of
+ the table in the following link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
+ resultType string).
+ :type encoding_name: object
+ :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate",
+ "snappy", "lz4".
+ :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec
+ :param compression_level: The data compression method used for DelimitedText. Possible values
+ include: "Optimal", "Fastest".
+ :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel
+ :param quote_char: The quote character. Type: string (or Expression with resultType string).
+ :type quote_char: object
+ :param escape_char: The escape character. Type: string (or Expression with resultType string).
+ :type escape_char: object
+ :param first_row_as_header: When used as input, treat the first row of data as headers. When
+ used as output, write the headers into the output as the first row of data. The default value is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type first_row_as_header: object
+ :param null_value: The null value string. Type: string (or Expression with resultType string).
+ :type null_value: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.type = 'DelimitedText' # type: str + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + + +class FormatReadSettings(msrest.serialization.Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DelimitedTextReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} + } + + def __init__( + self, + **kwargs + ): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'FormatReadSettings' # type: str + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from + input files. Type: integer (or Expression with resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextReadSettings, self).__init__(**kwargs) + self.type = 'DelimitedTextReadSettings' # type: str + self.skip_line_count = kwargs.get('skip_line_count', None) + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. + :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextSink, self).__init__(**kwargs) + self.type = 'DelimitedTextSink' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: DelimitedText format settings. + :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextSource, self).__init__(**kwargs) + self.type = 'DelimitedTextSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param quote_all_text: Indicates whether string values should always be enclosed with quotes. + Type: boolean (or Expression with resultType boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the files. Type: string (or + Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextWriteSettings, self).__init__(**kwargs) + self.type = 'DelimitedTextWriteSettings' # type: str + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs['file_extension'] + + +class DependencyReference(msrest.serialization.Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. 
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__( + self, + **kwargs + ): + super(DependencyReference, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class DistcpSettings(msrest.serialization.Model): + """Distcp settings. + + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: + string (or Expression with resultType string). + :type resource_manager_endpoint: object + :param temp_script_path: Required. Specifies an existing folder path which will be used to + store temp Distcp command script. The script file is generated by ADF and will be removed after + Copy job finished. Type: string (or Expression with resultType string). + :type temp_script_path: object + :param distcp_options: Specifies the Distcp options. Type: string (or Expression with + resultType string). + :type distcp_options: object + """ + + _validation = { + 'resource_manager_endpoint': {'required': True}, + 'temp_script_path': {'required': True}, + } + + _attribute_map = { + 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, + 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, + 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = kwargs['resource_manager_endpoint'] + self.temp_script_path = kwargs['temp_script_path'] + self.distcp_options = kwargs.get('distcp_options', None) + + +class DocumentDbCollectionDataset(Dataset): + """Microsoft Azure Document Database Collection dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param collection_name: Required. 
Document Database collection name. Type: string (or + Expression with resultType string). + :type collection_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentDbCollectionDataset, self).__init__(**kwargs) + self.type = 'DocumentDbCollection' # type: str + self.collection_name = kwargs['collection_name'] + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentDbCollectionSink, self).__init__(**kwargs) + self.type = 'DocumentDbCollectionSink' # type: str + self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Documents query. Type: string (or Expression with resultType string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or Expression with + resultType string). + :type nesting_separator: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentDbCollectionSource, self).__init__(**kwargs) + self.type = 'DocumentDbCollectionSource' # type: str + self.query = kwargs.get('query', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.query_timeout = kwargs.get('query_timeout', None) + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DrillLinkedService, self).__init__(**kwargs) + self.type = 'Drill' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class DrillSource(TabularSource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DrillSource, self).__init__(**kwargs) + self.type = 'DrillSource' # type: str + self.query = kwargs.get('query', None) + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DrillTableDataset, self).__init__(**kwargs) + self.type = 'DrillTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + + +class DWCopyCommandDefaultValue(msrest.serialization.Model): + """Default value. + + :param column_name: Column name. Type: object (or Expression with resultType string). + :type column_name: object + :param default_value: The default value of the column. Type: object (or Expression with + resultType string). + :type default_value: object + """ + + _attribute_map = { + 'column_name': {'key': 'columnName', 'type': 'object'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + self.column_name = kwargs.get('column_name', None) + self.default_value = kwargs.get('default_value', None) + + +class DWCopyCommandSettings(msrest.serialization.Model): + """DW Copy Command settings. + + :param default_values: Specifies the default values for each target column in SQL DW. The + default values in the property overwrite the DEFAULT constraint set in the DB, and identity + column cannot have a default value. Type: array of objects (or Expression with resultType array + of objects). + :type default_values: list[~azure.synapse.artifacts.models.DWCopyCommandDefaultValue] + :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: + key value pairs (value should be string type) (or Expression with resultType object). Example: + "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. + :type additional_options: dict[str, str] + """ + + _attribute_map = { + 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(DWCopyCommandSettings, self).__init__(**kwargs) + self.default_values = kwargs.get('default_values', None) + self.additional_options = kwargs.get('additional_options', None) + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData + endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. Mark this field as a + SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key + Vault. Type: string (or Expression with resultType string). + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which + your application resides. Retrieve it by hovering the mouse in the top-right corner of the + Azure portal. Type: string (or Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: + string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsAXLinkedService, self).__init__(**kwargs) + self.type = 'DynamicsAX' # type: str + self.url = kwargs['url'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.tenant = kwargs['tenant'] + self.aad_resource_id = kwargs['aad_resource_id'] + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + with resultType string). + :type path: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.type = 'DynamicsAXResource' # type: str + self.path = kwargs['path'] + + +class DynamicsAXSource(TabularSource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsAXSource, self).__init__(**kwargs) + self.type = 'DynamicsAXSource' # type: str + self.query = kwargs.get('query', None) + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). + :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmEntityDataset, self).__init__(**kwargs) + self.type = 'DynamicsCrmEntity' # type: str + self.entity_name = kwargs.get('entity_name', None) + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' + for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: + string (or Expression with resultType string). Possible values include: "Online", + "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Type: string (or Expression with resultType + string). + :type host_name: object + :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem + and not allowed for online. Default is 443. Type: integer (or Expression with resultType + integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM instance. The property is + required for on-prem and required for online when there are more than one Dynamics CRM + instances associated with the user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect to Dynamics CRM + server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). Possible values include: "Office365", "Ifd", + "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Dynamics CRM instance. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". 
+ :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmLinkedService, self).__init__(**kwargs) + self.type = 'DynamicsCrm' # type: str + self.deployment_type = kwargs['deployment_type'] + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) + self.service_principal_credential = kwargs.get('service_principal_credential', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmSink, self).__init__(**kwargs) + self.type = 'DynamicsCrmSink' # type: str + self.write_behavior = kwargs['write_behavior'] + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.alternate_key_name = kwargs.get('alternate_key_name', None) + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmSource, self).__init__(**kwargs) + self.type = 'DynamicsCrmSource' # type: str + self.query = kwargs.get('query', None) + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). 
+ :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsEntityDataset, self).__init__(**kwargs) + self.type = 'DynamicsEntity' # type: str + self.entity_name = kwargs.get('entity_name', None) + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or + Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics server. The property is required + for on-prem and not allowed for online. Type: string (or Expression with resultType string). + :type host_name: str + :param port: The port of on-premises Dynamics server. The property is required for on-prem and + not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), + minimum: 0. + :type port: str + :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- + line and not allowed for on-prem. Type: string (or Expression with resultType string). + :type service_uri: str + :param organization_name: The organization name of the Dynamics instance. The property is + required for on-prem and required for online when there are more than one Dynamics instances + associated with the user. Type: string (or Expression with resultType string). + :type organization_name: str + :param authentication_type: Required. The authentication type to connect to Dynamics server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". 
+ :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Dynamics instance. Type: string (or Expression with + resultType string). + :type username: object + :param password: Password to access the Dynamics instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, + 'port': {'key': 'typeProperties.port', 'type': 'str'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsLinkedService, self).__init__(**kwargs) + self.type = 'Dynamics' # type: str + self.deployment_type = kwargs['deployment_type'] + self.host_name = kwargs.get('host_name', None) + self.port = 
kwargs.get('port', None)
+        self.service_uri = kwargs.get('service_uri', None)
+        self.organization_name = kwargs.get('organization_name', None)
+        self.authentication_type = kwargs['authentication_type']
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None)
+        self.service_principal_credential = kwargs.get('service_principal_credential', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class DynamicsSink(CopySink):
+    """A copy activity Dynamics sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy sink type.Constant filled by server.
+    :type type: str
+    :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+     integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+     integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param write_behavior: Required. The write behavior for the operation. Possible values include:
+     "Upsert".
+    :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior
+    :param ignore_null_values: The flag indicating whether to ignore null values from input dataset
+     (except key fields) during write operation. Default is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type ignore_null_values: object
+    :param alternate_key_name: The logical name of the alternate key which will be used when
+     upserting records. Type: string (or Expression with resultType string).
+ :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsSink, self).__init__(**kwargs) + self.type = 'DynamicsSink' # type: str + self.write_behavior = kwargs['write_behavior'] + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.alternate_key_name = kwargs.get('alternate_key_name', None) + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsSource, self).__init__(**kwargs) + self.type = 'DynamicsSource' # type: str + self.query = kwargs.get('query', None) + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). + :type endpoint: object + :param username: Required. The site name and user name of your Eloqua account in the form: + sitename/username. (i.e. Eloqua/Alice). + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(EloquaLinkedService, self).__init__(**kwargs) + self.type = 'Eloqua' # type: str + self.endpoint = kwargs['endpoint'] + self.username = kwargs['username'] + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. 
+ :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(EloquaObjectDataset, self).__init__(**kwargs) + self.type = 'EloquaObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class EloquaSource(TabularSource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(EloquaSource, self).__init__(**kwargs) + self.type = 'EloquaSource' # type: str + self.query = kwargs.get('query', None) + + +class EntityReference(msrest.serialization.Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: object + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorContract(msrest.serialization.Model): + """Contains details when the response code indicates an error. + + :param error: The error details. + :type error: ~azure.synapse.artifacts.models.ErrorResponse + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorContract, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorResponse(msrest.serialization.Model): + """The resource management error response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.synapse.artifacts.models.ErrorResponse] + :ivar additional_info: The error additional info. 
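For orientation, a usage sketch for the Eloqua models regenerated above (the kwargs-based _models.py flavor): it builds the linked service and a dataset that points at it. SecureString as the concrete SecretBase implementation and the LinkedServiceReference keyword arguments are assumptions, not confirmed by this patch.

    # Usage sketch only: Eloqua linked service plus a dataset bound to it.
    from azure.synapse.artifacts.models import (
        EloquaLinkedService,
        EloquaObjectDataset,
        LinkedServiceReference,
        SecureString,  # assumed SecretBase implementation
    )

    eloqua_ls = EloquaLinkedService(
        endpoint="eloqua.example.com",            # required, read via kwargs['endpoint']
        username="Eloqua/Alice",                  # required, sitename/username form
        password=SecureString(value="<placeholder>"),
        use_encrypted_endpoints=True,
    )

    eloqua_ds = EloquaObjectDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",        # assumed reference type literal
            reference_name="EloquaLinkedService1",
        ),
        table_name="Contact",
    )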
+ :vartype additional_info: list[~azure.synapse.artifacts.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorResponse]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. + :type row_limits: int + :param expression: The expression for preview. + :type expression: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.stream_name = kwargs.get('stream_name', None) + self.row_limits = kwargs.get('row_limits', None) + self.expression = kwargs.get('expression', None) + + +class ExecuteDataFlowActivity(ExecutionActivity): + """Execute data flow activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param data_flow: Required. Data flow reference. + :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference + :param staging: Staging info for execute data flow activity. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param integration_runtime: The integration runtime reference. + :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param compute: Compute properties for data flow activity. 
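EvaluateDataFlowExpressionRequest above is a flat request payload with only optional fields; a minimal, hypothetical example of filling it in for a debug-session expression preview (all identifiers are placeholders):

    # Hypothetical expression-preview payload; every field is optional.
    from azure.synapse.artifacts.models import EvaluateDataFlowExpressionRequest

    preview_request = EvaluateDataFlowExpressionRequest(
        session_id="00000000-0000-0000-0000-000000000000",  # placeholder debug session id
        data_flow_name="dataflow1",
        stream_name="sink1",
        row_limits=100,
        expression="toString(columnA)",
    )
    # Per the _attribute_map, this serializes to the camelCase keys
    # sessionId, dataFlowName, streamName, rowLimits and expression.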
+ :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'data_flow': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, + 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, + 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecuteDataFlowActivity, self).__init__(**kwargs) + self.type = 'ExecuteDataFlow' # type: str + self.data_flow = kwargs['data_flow'] + self.staging = kwargs.get('staging', None) + self.integration_runtime = kwargs.get('integration_runtime', None) + self.compute = kwargs.get('compute', None) + + +class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): + """Compute properties for data flow activity. + + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. + :type core_count: int + """ + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) + self.compute_type = kwargs.get('compute_type', None) + self.core_count = kwargs.get('core_count', None) + + +class ExecutePipelineActivity(Activity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.synapse.artifacts.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait for the dependent + pipeline execution to finish. Default is false. 
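A sketch of ExecuteDataFlowActivity, which requires name and data_flow. The DataFlowReference constructor shape is assumed (it is referenced but not defined in this excerpt); the compute values follow the options documented above.

    # Hypothetical ExecuteDataFlow activity; DataFlowReference keywords are assumed.
    from azure.synapse.artifacts.models import (
        DataFlowReference,
        ExecuteDataFlowActivity,
        ExecuteDataFlowActivityTypePropertiesCompute,
    )

    run_data_flow = ExecuteDataFlowActivity(
        name="RunMappingDataFlow",
        data_flow=DataFlowReference(
            type="DataFlowReference",             # assumed reference type literal
            reference_name="dataflow1",
        ),
        compute=ExecuteDataFlowActivityTypePropertiesCompute(
            compute_type="General",               # "General", "MemoryOptimized" or "ComputeOptimized"
            core_count=8,                         # one of the documented supported core counts
        ),
    )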
+ :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecutePipelineActivity, self).__init__(**kwargs) + self.type = 'ExecutePipeline' # type: str + self.pipeline = kwargs['pipeline'] + self.parameters = kwargs.get('parameters', None) + self.wait_on_completion = kwargs.get('wait_on_completion', None) + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + "x64". Type: string (or Expression with resultType string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: string (or Expression + with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. Type: string (or + Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the SSIS package. + :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the SSIS package. + :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers to execute the SSIS + package. 
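ExecutePipelineActivity is the simplest of the execution activities above: only name and pipeline are required, and wait_on_completion falls back to the service-side default (false) when omitted. The PipelineReference keywords are assumed here.

    # Hypothetical ExecutePipeline activity invoking another pipeline.
    from azure.synapse.artifacts.models import ExecutePipelineActivity, PipelineReference

    invoke_child = ExecutePipelineActivity(
        name="InvokeChildPipeline",
        pipeline=PipelineReference(
            type="PipelineReference",             # assumed reference type literal
            reference_name="child_pipeline",
        ),
        parameters={"runDate": "2020-09-01"},
        wait_on_completion=True,
    )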
+ :type project_connection_managers: dict[str, object] + :param package_connection_managers: The package level connection managers to execute the SSIS + package. + :type package_connection_managers: dict[str, object] + :param property_overrides: The property overrides to execute the SSIS package. + :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecuteSSISPackageActivity, self).__init__(**kwargs) + self.type = 'ExecuteSSISPackage' # type: str + self.package_location = kwargs['package_location'] + self.runtime = kwargs.get('runtime', None) + self.logging_level = kwargs.get('logging_level', None) + self.environment_path = kwargs.get('environment_path', None) + self.execution_credential = kwargs.get('execution_credential', None) + self.connect_via = kwargs['connect_via'] + self.project_parameters = kwargs.get('project_parameters', None) + self.package_parameters = kwargs.get('package_parameters', None) + self.project_connection_managers = kwargs.get('project_connection_managers', None) + self.package_connection_managers = kwargs.get('package_connection_managers', None) + self.property_overrides = kwargs.get('property_overrides', None) + self.log_location = kwargs.get('log_location', None) + + +class ExposureControlRequest(msrest.serialization.Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. 
+ :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = kwargs.get('feature_name', None) + self.feature_type = kwargs.get('feature_type', None) + + +class ExposureControlResponse(msrest.serialization.Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None + + +class Expression(msrest.serialization.Model): + """Azure Synapse expression definition. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Expression type. Possible values include: "Expression". + :type type: str or ~azure.synapse.artifacts.models.ExpressionType + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Expression, self).__init__(**kwargs) + self.type = kwargs['type'] + self.value = kwargs['value'] + + +class FileServerLinkedService(LinkedService): + """File system linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
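Expression is one of the few models in this file where every field is required; the kwargs-based constructor indexes kwargs['type'] and kwargs['value'] directly, so omitting either raises a KeyError at construction time. A minimal instance (the expression string itself is only illustrative):

    from azure.synapse.artifacts.models import Expression

    items_expression = Expression(
        type="Expression",                        # only documented value of ExpressionType
        value="@pipeline().parameters.inputFiles",
    )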
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServerLinkedService, self).__init__(**kwargs) + self.type = 'FileServer' # type: str + self.host = kwargs['host'] + self.user_id = kwargs.get('user_id', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServerLocation, self).__init__(**kwargs) + self.type = 'FileServerLocation' # type: str + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. 
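FileServerLinkedService needs only host; credentials are optional, and FileServerLocation carries the folder/file values for a dataset. A hypothetical on-premises example, again assuming SecureString as the SecretBase implementation:

    from azure.synapse.artifacts.models import (
        FileServerLinkedService,
        FileServerLocation,
        SecureString,  # assumed SecretBase implementation
    )

    file_server_ls = FileServerLinkedService(
        host="\\\\fileserver01\\share",           # required, read via kwargs['host']
        user_id="CONTOSO\\svc_copy",
        password=SecureString(value="<placeholder>"),
    )

    csv_location = FileServerLocation(
        folder_path="incoming/2020/09",
        file_name="data.csv",
    )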
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServerReadSettings, self).__init__(**kwargs) + self.type = 'FileServerReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServerWriteSettings, self).__init__(**kwargs) + self.type = 'FileServerWriteSettings' # type: str + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileSystemSink, self).__init__(**kwargs) + self.type = 'FileSystemSink' # type: str + self.copy_behavior = kwargs.get('copy_behavior', None) + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileSystemSource, self).__init__(**kwargs) + self.type = 'FileSystemSource' # type: str + self.recursive = kwargs.get('recursive', None) + + +class FilterActivity(Activity): + """Filter and return results from input array based on the conditions. + + All required parameters must be populated in order to send to Azure. 
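The read settings and file-system sink/source above are the pieces a copy activity composes; a brief sketch of the source-side read settings together with a sink (the copy_behavior value is illustrative, since the property is an untyped object):

    from azure.synapse.artifacts.models import FileServerReadSettings, FileSystemSink

    read_settings = FileServerReadSettings(
        recursive=True,
        wildcard_folder_path="incoming/*",
        wildcard_file_name="*.csv",
        modified_datetime_start="2020-08-01T00:00:00Z",
    )

    sink = FileSystemSink(
        copy_behavior="PreserveHierarchy",        # illustrative value; the field is untyped
        max_concurrent_connections=4,
    )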
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param items: Required. Input array on which filter should be applied. + :type items: ~azure.synapse.artifacts.models.Expression + :param condition: Required. Condition to be used for filtering the input. + :type condition: ~azure.synapse.artifacts.models.Expression + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'items': {'required': True}, + 'condition': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, + 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + } + + def __init__( + self, + **kwargs + ): + super(FilterActivity, self).__init__(**kwargs) + self.type = 'Filter' # type: str + self.items = kwargs['items'] + self.condition = kwargs['condition'] + + +class ForEachActivity(Activity): + """This activity is used for iterating over a collection and execute given activities. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). + :type is_sequential: bool + :param batch_count: Batch count to be used for controlling the number of parallel execution + (when isSequential is set to false). + :type batch_count: int + :param items: Required. Collection to iterate. + :type items: ~azure.synapse.artifacts.models.Expression + :param activities: Required. List of activities to execute . 
+ :type activities: list[~azure.synapse.artifacts.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'batch_count': {'maximum': 50}, + 'items': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, + 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, + 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__( + self, + **kwargs + ): + super(ForEachActivity, self).__init__(**kwargs) + self.type = 'ForEach' # type: str + self.is_sequential = kwargs.get('is_sequential', None) + self.batch_count = kwargs.get('batch_count', None) + self.items = kwargs['items'] + self.activities = kwargs['activities'] + + +class FtpReadSettings(StoreReadSettings): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. + :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(FtpReadSettings, self).__init__(**kwargs) + self.type = 'FtpReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.use_binary_transfer = kwargs.get('use_binary_transfer', None) + + +class FtpServerLinkedService(LinkedService): + """A FTP server Linked Service. + + All required parameters must be populated in order to send to Azure. 
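ForEachActivity caps batch_count at 50 through its _validation map and requires both items and activities; the sketch below iterates a pipeline parameter and nests the ExecutePipelineActivity shape shown earlier (the expression string and reference keywords remain assumptions):

    from azure.synapse.artifacts.models import (
        ExecutePipelineActivity,
        Expression,
        ForEachActivity,
        PipelineReference,
    )

    for_each = ForEachActivity(
        name="ForEachInputFile",
        is_sequential=False,
        batch_count=10,                           # _validation caps this at 50
        items=Expression(type="Expression", value="@pipeline().parameters.inputFiles"),
        activities=[
            ExecutePipelineActivity(
                name="ProcessOneFile",
                pipeline=PipelineReference(
                    type="PipelineReference",     # assumed reference type literal
                    reference_name="process_file_pipeline",
                ),
            ),
        ],
    )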
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType + string). + :type host: object + :param port: The TCP port number that the FTP server uses to listen for client connections. + Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param authentication_type: The authentication type to be used to connect to the FTP server. + Possible values include: "Basic", "Anonymous". + :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType + :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to logon the FTP server. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_ssl: object + :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate + when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with + resultType boolean). 
+ :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FtpServerLinkedService, self).__init__(**kwargs) + self.type = 'FtpServer' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FtpServerLocation, self).__init__(**kwargs) + self.type = 'FtpServerLocation' # type: str + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
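FtpServerLinkedService requires only host; authentication_type takes the FtpAuthenticationType values listed above ("Basic" or "Anonymous"). A hypothetical basic-auth example, with SecureString again standing in for SecretBase:

    from azure.synapse.artifacts.models import FtpServerLinkedService, SecureString

    ftp_ls = FtpServerLinkedService(
        host="ftp.example.com",                   # required, read via kwargs['host']
        port=21,
        authentication_type="Basic",
        user_name="ftpuser",
        password=SecureString(value="<placeholder>"),  # SecureString is assumed
        enable_ssl=True,
    )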
+ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. + :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__( + self, + **kwargs + ): + super(GetMetadataActivity, self).__init__(**kwargs) + self.type = 'GetMetadata' # type: str + self.dataset = kwargs['dataset'] + self.field_list = kwargs.get('field_list', None) + + +class GetSsisObjectMetadataRequest(msrest.serialization.Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = kwargs.get('metadata_path', None) + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_customer_id: Required. The Client customer ID of the AdWords account that you + want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the manager account that + you use to grant access to the AdWords API. + :type developer_token: ~azure.synapse.artifacts.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. 
ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords + for UserAuthentication. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.type = 'GoogleAdWords' # type: str + self.client_customer_id = kwargs['client_customer_id'] + self.developer_token = kwargs['developer_token'] + self.authentication_type = kwargs['authentication_type'] + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
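GoogleAdWordsLinkedService requires client_customer_id, developer_token and authentication_type; with UserAuthentication the OAuth refresh token and client secret travel as SecretBase values. A hypothetical sketch, with all identifiers as placeholders and SecureString assumed:

    from azure.synapse.artifacts.models import GoogleAdWordsLinkedService, SecureString

    adwords_ls = GoogleAdWordsLinkedService(
        client_customer_id="123-456-7890",
        developer_token=SecureString(value="<developer-token>"),
        authentication_type="UserAuthentication",
        client_id="<oauth-client-id>",
        client_secret=SecureString(value="<oauth-client-secret>"),
        refresh_token=SecureString(value="<refresh-token>"),
    )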
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.type = 'GoogleAdWordsObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class GoogleAdWordsSource(TabularSource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.type = 'GoogleAdWordsSource' # type: str + self.query = kwargs.get('query', None) + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + Drive access enables support for federated tables that combine BigQuery data with data from + Google Drive. The default value is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery + for UserAuthentication. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleBigQueryLinkedService, self).__init__(**kwargs) + self.type = 'GoogleBigQuery' # type: str + self.project = kwargs['project'] + self.additional_projects = kwargs.get('additional_projects', None) + self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) + self.authentication_type = kwargs['authentication_type'] + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using database + table + properties instead. + :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or Expression with + resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or Expression with + resultType string). + :type dataset: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) + self.type = 'GoogleBigQueryObject' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.dataset = kwargs.get('dataset', None) + + +class GoogleBigQuerySource(TabularSource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleBigQuerySource, self).__init__(**kwargs) + self.type = 'GoogleBigQuerySource' # type: str + self.query = kwargs.get('query', None) + + +class GoogleCloudStorageLinkedService(LinkedService): + """Linked service for Google Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access + Management (IAM) user. + :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Google Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
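A hedged sketch of wiring up the Google BigQuery linked service, dataset, and source shown above; the GCP project, service-account email, and key-file path are placeholders, and LinkedServiceReference is assumed from elsewhere in this module.

    from azure.synapse.artifacts.models import (
        GoogleBigQueryLinkedService,
        GoogleBigQueryObjectDataset,
        GoogleBigQuerySource,
        LinkedServiceReference,
    )

    bq_linked_service = GoogleBigQueryLinkedService(
        project="my-gcp-project",                     # required
        authentication_type="ServiceAuthentication",  # required; per the docstring, only valid on self-hosted IR
        email="copy-svc@my-gcp-project.iam.gserviceaccount.com",
        key_file_path="/keys/bigquery.p12",
    )

    bq_dataset = GoogleBigQueryObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name="MyBigQueryLinkedService", type="LinkedServiceReference"
        ),
        dataset="analytics",  # BigQuery database name
        table="events",
    )

    bq_source = GoogleBigQuerySource(query="SELECT user_id, event_ts FROM analytics.events")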
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) + self.type = 'GoogleCloudStorage' # type: str + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GoogleCloudStorageLocation(DatasetLocation): + """The location of Google Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with + resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleCloudStorageLocation, self).__init__(**kwargs) + self.type = 'GoogleCloudStorageLocation' # type: str + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) + + +class GoogleCloudStorageReadSettings(StoreReadSettings): + """Google Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleCloudStorageReadSettings, self).__init__(**kwargs) + self.type = 'GoogleCloudStorageReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. 
Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GreenplumLinkedService, self).__init__(**kwargs) + self.type = 'Greenplum' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GreenplumSource(TabularSource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
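A sketch of the Google Cloud Storage models above; SecureString is assumed to be the concrete SecretBase type defined elsewhere in this module, and the bucket, key, and path values are placeholders.

    from azure.synapse.artifacts.models import (
        GoogleCloudStorageLinkedService,
        GoogleCloudStorageLocation,
        GoogleCloudStorageReadSettings,
        SecureString,  # assumed SecretBase implementation, not part of this hunk
    )

    gcs_linked_service = GoogleCloudStorageLinkedService(
        access_key_id="GOOG1EXAMPLEKEY",
        secret_access_key=SecureString(value="<secret-access-key>"),
    )

    gcs_location = GoogleCloudStorageLocation(
        bucket_name="my-bucket",
        folder_path="raw/2020",
    )

    gcs_read_settings = GoogleCloudStorageReadSettings(
        recursive=True,
        wildcard_file_name="*.csv",
        enable_partition_discovery=False,
    )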
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GreenplumSource, self).__init__(**kwargs) + self.type = 'GreenplumSource' # type: str + self.query = kwargs.get('query', None) + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GreenplumTableDataset, self).__init__(**kwargs) + self.type = 'GreenplumTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). + :type host: object + :param port: The TCP port that the HBase instance uses to listen for client connections. The + default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version). + :type http_path: object + :param authentication_type: Required. The authentication mechanism to use to connect to the + HBase server. Possible values include: "Anonymous", "Basic". + :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
+ :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HBaseLinkedService, self).__init__(**kwargs) + self.type = 'HBase' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
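A sketch for the Greenplum models above, again assuming the LinkedServiceReference constructor from elsewhere in this module; the connection string and table names are placeholders.

    from azure.synapse.artifacts.models import (
        GreenplumLinkedService,
        GreenplumSource,
        GreenplumTableDataset,
        LinkedServiceReference,
    )

    gp_linked_service = GreenplumLinkedService(
        connection_string="HOST=gp.example.com;PORT=5432;DB=warehouse;UID=loader",
    )

    gp_table = GreenplumTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name="MyGreenplumLinkedService", type="LinkedServiceReference"
        ),
        schema_type_properties_schema="public",  # serialized under typeProperties.schema
        table="sales",
    )

    gp_source = GreenplumSource(query="SELECT * FROM public.sales", query_timeout="02:00:00")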
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HBaseObjectDataset, self).__init__(**kwargs) + self.type = 'HBaseObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class HBaseSource(TabularSource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HBaseSource, self).__init__(**kwargs) + self.type = 'HBaseSource' # type: str + self.query = kwargs.get('query', None) + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the HDFS. Possible values + are: Anonymous and Windows. Type: string (or Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
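A sketch for the HBase models above; SecureString and LinkedServiceReference are assumed from elsewhere in this module, and the host, credentials, and table are placeholders.

    from azure.synapse.artifacts.models import (
        HBaseLinkedService,
        HBaseObjectDataset,
        HBaseSource,
        LinkedServiceReference,
        SecureString,  # assumed SecretBase implementation
    )

    hbase_linked_service = HBaseLinkedService(
        host="192.168.222.160",       # required
        authentication_type="Basic",  # required; "Anonymous" is the other documented value
        http_path="/gateway/sandbox/hbase/version",
        username="admin",
        password=SecureString(value="<password>"),
        enable_ssl=True,
    )

    hbase_dataset = HBaseObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name="MyHBaseLinkedService", type="LinkedServiceReference"
        ),
        table_name="web_events",
    )

    hbase_source = HBaseSource(query="SELECT * FROM web_events")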
+ :type password: ~azure.synapse.artifacts.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsLinkedService, self).__init__(**kwargs) + self.type = 'Hdfs' # type: str + self.url = kwargs['url'] + self.authentication_type = kwargs.get('authentication_type', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsLocation, self).__init__(**kwargs) + self.type = 'HdfsLocation' # type: str + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + string). 
+ :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsReadSettings, self).__init__(**kwargs) + self.type = 'HdfsReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. 
+ :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsSource, self).__init__(**kwargs) + self.type = 'HdfsSource' # type: str + self.recursive = kwargs.get('recursive', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster + is with ESP (Enterprise Security Package). 
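A sketch using only the HDFS models defined above: linked service, dataset location, read settings, and copy source. All endpoint and path values are placeholders.

    from azure.synapse.artifacts.models import (
        HdfsLinkedService,
        HdfsLocation,
        HdfsReadSettings,
        HdfsSource,
    )

    hdfs_linked_service = HdfsLinkedService(
        url="http://myhostname:50070/webhdfs/v1",  # required
        authentication_type="Windows",             # "Anonymous" and "Windows" are the documented values
        user_name="svc_copy",
    )

    hdfs_location = HdfsLocation(folder_path="/data/landing", file_name="events.json")

    hdfs_read_settings = HdfsReadSettings(
        recursive=True,
        wildcard_file_name="*.json",
        enable_partition_discovery=False,
    )

    hdfs_source = HdfsSource(recursive=True)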
+ :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightHiveActivity, self).__init__(**kwargs) + self.type = 'HDInsightHive' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.variables = kwargs.get('variables', None) + self.query_timeout = kwargs.get('query_timeout', None) + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with + resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to + the HCatalog database. 
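A sketch of an HDInsightHiveActivity as defined above; the activity, linked-service, and script names are placeholders, and LinkedServiceReference is assumed from elsewhere in this module.

    from azure.synapse.artifacts.models import HDInsightHiveActivity, LinkedServiceReference

    hive_activity = HDInsightHiveActivity(
        name="RunDailyHiveScript",  # required
        linked_service_name=LinkedServiceReference(
            reference_name="MyHDInsightLinkedService", type="LinkedServiceReference"
        ),
        script_path="scripts/daily_aggregation.hql",
        script_linked_service=LinkedServiceReference(
            reference_name="MyStorageLinkedService", type="LinkedServiceReference"
        ),
        defines={"run_date": "2020-09-01"},
        get_debug_info="Failure",
        query_timeout=60,  # minutes; per the docstring, effective only on ESP clusters
    )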
+ :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. + Type: string (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightLinkedService, self).__init__(**kwargs) + self.type = 'HDInsight' # type: str + self.cluster_uri = kwargs['cluster_uri'] + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightMapReduceActivity, self).__init__(**kwargs) + self.type = 'HDInsightMapReduce' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.class_name = kwargs['class_name'] + self.jar_file_path = kwargs['jar_file_path'] + self.jar_linked_service = kwargs.get('jar_linked_service', None) + self.jar_libs = kwargs.get('jar_libs', None) + self.defines = kwargs.get('defines', None) + + +class HDInsightOnDemandLinkedService(LinkedService): + """HDInsight ondemand linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. 
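A sketch of an HDInsightMapReduceActivity as defined above, showing its three caller-supplied required parameters; the jar path, class name, and linked-service names are placeholders, and LinkedServiceReference is assumed from elsewhere in this module.

    from azure.synapse.artifacts.models import HDInsightMapReduceActivity, LinkedServiceReference

    mr_activity = HDInsightMapReduceActivity(
        name="RunWordCount",                 # required
        class_name="org.example.WordCount",  # required
        jar_file_path="jars/wordcount.jar",  # required
        jar_linked_service=LinkedServiceReference(
            reference_name="MyStorageLinkedService", type="LinkedServiceReference"
        ),
        arguments=["/data/input", "/data/output"],
        get_debug_info="Failure",
    )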
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. + Type: string (or Expression with resultType string). + :type cluster_size: object + :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity + run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string + (or Expression with resultType string). + :type time_to_live: object + :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + resultType string). + :type version: object + :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand + cluster for storing and processing data. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: + string (or Expression with resultType string). + :type host_subscription_id: object + :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string + (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key for the service principal id. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: + string (or Expression with resultType string). + :type cluster_resource_group: object + :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with + timestamp. Type: string (or Expression with resultType string). + :type cluster_name_prefix: object + :param cluster_user_name: The username to access the cluster. Type: string (or Expression with + resultType string). + :type cluster_user_name: object + :param cluster_password: The password to access the cluster. + :type cluster_password: ~azure.synapse.artifacts.models.SecretBase + :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + Linux). Type: string (or Expression with resultType string). + :type cluster_ssh_user_name: object + :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). + :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase + :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight + linked service so that the Data Factory service can register them on your behalf. + :type additional_linked_service_names: + list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the + HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database + as the metastore. 
+ :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param cluster_type: The cluster type. Type: string (or Expression with resultType string). + :type cluster_type: object + :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or + Expression with resultType string). + :type spark_version: object + :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) + for the HDInsight cluster to be created. + :type core_configuration: object + :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for + the HDInsight cluster. + :type h_base_configuration: object + :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the + HDInsight cluster. + :type hdfs_configuration: object + :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the + HDInsight cluster. + :type hive_configuration: object + :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- + site.xml) for the HDInsight cluster. + :type map_reduce_configuration: object + :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for + the HDInsight cluster. + :type oozie_configuration: object + :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for + the HDInsight cluster. + :type storm_configuration: object + :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the + HDInsight cluster. + :type yarn_configuration: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param head_node_size: Specifies the size of the head node for the HDInsight cluster. + :type head_node_size: object + :param data_node_size: Specifies the size of the data node for the HDInsight cluster. + :type data_node_size: object + :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight + cluster. + :type zookeeper_node_size: object + :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. + Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- + cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- + us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be + joined after creation. Type: string (or Expression with resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + specified, then this property is required. Type: string (or Expression with resultType string). 
+ :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 
'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) + self.type = 'HDInsightOnDemand' # type: str + self.cluster_size = kwargs['cluster_size'] + self.time_to_live = kwargs['time_to_live'] + self.version = kwargs['version'] + self.linked_service_name = kwargs['linked_service_name'] + self.host_subscription_id = kwargs['host_subscription_id'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs['tenant'] + self.cluster_resource_group = kwargs['cluster_resource_group'] + self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) + self.cluster_user_name = kwargs.get('cluster_user_name', None) + self.cluster_password = kwargs.get('cluster_password', None) + self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) + self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) + self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.cluster_type = kwargs.get('cluster_type', None) + self.spark_version = kwargs.get('spark_version', None) + self.core_configuration = kwargs.get('core_configuration', None) + self.h_base_configuration = kwargs.get('h_base_configuration', None) + self.hdfs_configuration = kwargs.get('hdfs_configuration', None) + self.hive_configuration = kwargs.get('hive_configuration', None) + self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) + self.oozie_configuration = kwargs.get('oozie_configuration', None) + self.storm_configuration = kwargs.get('storm_configuration', None) + self.yarn_configuration = kwargs.get('yarn_configuration', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.head_node_size = kwargs.get('head_node_size', None) + self.data_node_size = kwargs.get('data_node_size', None) + self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) + self.script_actions = kwargs.get('script_actions', None) + self.virtual_network_id = kwargs.get('virtual_network_id', None) + self.subnet_name = kwargs.get('subnet_name', None) + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. 
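# --- Editor's illustrative sketch (not part of the generated module or of this
# --- patch's file content): constructing the HDInsightMapReduceActivity model
# --- shown above. All names, class names, and paths are hypothetical placeholders.
from azure.synapse.artifacts.models import HDInsightMapReduceActivity

mr_activity = HDInsightMapReduceActivity(
    name="RunWordCount",                          # required by the Activity base class
    class_name="org.example.WordCount",           # required
    jar_file_path="jobs/jars/wordcount.jar",      # required
    arguments=["wasbs://input/", "wasbs://output/"],
    get_debug_info="Failure",                     # str or HDInsightActivityDebugInfoOption
)
# Required values are read as kwargs['...'] in __init__, so omitting them raises
# immediately; optional ones default to None via kwargs.get(...).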
+ :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression + with resultType array). + :type arguments: object + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightPigActivity, self).__init__(**kwargs) + self.type = 'HDInsightPig' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. 
+ Type: string (or Expression with resultType string). + :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of the code/package to + be executed. Type: string (or Expression with resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading the entry file and + dependencies, and for receiving logs. + :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. Type: string (or + Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. + :type spark_config: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightSparkActivity, self).__init__(**kwargs) + self.type = 'HDInsightSpark' # type: str + self.root_path = kwargs['root_path'] + self.entry_file_path = kwargs['entry_file_path'] + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) + self.class_name = kwargs.get('class_name', None) + self.proxy_user = kwargs.get('proxy_user', None) + self.spark_config = kwargs.get('spark_config', None) + + +class HDInsightStreamingActivity(ExecutionActivity): + """HDInsight streaming activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. 
+ :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType + string). + :type mapper: object + :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType + string). + :type reducer: object + :param input: Required. Input blob path. Type: string (or Expression with resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression with resultType string). + :type output: object + :param file_paths: Required. Paths to streaming job files. Can be directories. + :type file_paths: list[object] + :param file_linked_service: Linked service reference where the files are located. + :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param combiner: Combiner executable name. Type: string (or Expression with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. 
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightStreamingActivity, self).__init__(**kwargs) + self.type = 'HDInsightStreaming' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.mapper = kwargs['mapper'] + self.reducer = kwargs['reducer'] + self.input = kwargs['input'] + self.output = kwargs['output'] + self.file_paths = kwargs['file_paths'] + self.file_linked_service = kwargs.get('file_linked_service', None) + self.combiner = kwargs.get('combiner', None) + self.command_environment = kwargs.get('command_environment', None) + self.defines = kwargs.get('defines', None) + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. IP address or host name of the Hive server, separated by ';' for + multiple hosts (only when serviceDiscoveryMode is enable). 
+ :type host: object + :param port: The TCP port that the Hive server uses to listen for client connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: "HiveServer1", + "HiveServer2", "HiveThriftServer". + :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :type thrift_transport_protocol: str or + ~azure.synapse.artifacts.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to access the Hive server. + Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are + added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts + them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you provided in the Username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HiveLinkedService, self).__init__(**kwargs) + self.type = 'Hive' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs['authentication_type'] + self.service_discovery_mode = kwargs.get('service_discovery_mode', None) + self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) + self.use_native_query = kwargs.get('use_native_query', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. 
+ :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression + with resultType string). + :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HiveObjectDataset, self).__init__(**kwargs) + self.type = 'HiveObject' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + + +class HiveSource(TabularSource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HiveSource, self).__init__(**kwargs) + self.type = 'HiveSource' # type: str + self.query = kwargs.get('query', None) + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + string (or Expression with resultType string). + :type url: object + :param authentication_type: The authentication type to be used to connect to the HTTP server. + Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate with + EmbeddedCertData authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate + authentication. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only + valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS server SSL + certificate. Default value is true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, + 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HttpLinkedService, self).__init__(**kwargs) + self.type = 'HttpServer' # type: str + self.url = kwargs['url'] + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.embedded_cert_data = kwargs.get('embedded_cert_data', None) + self.cert_thumbprint = kwargs.get('cert_thumbprint', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + + +class HttpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP + server. 
+ :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HttpReadSettings, self).__init__(**kwargs) + self.type = 'HttpReadSettings' # type: str + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.request_timeout = kwargs.get('request_timeout', None) + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with + resultType string). + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HttpServerLocation, self).__init__(**kwargs) + self.type = 'HttpServerLocation' # type: str + self.relative_url = kwargs.get('relative_url', None) + + +class HttpSource(CopySource): + """A copy activity source for an HTTP file. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from + HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. 
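# --- Editor's illustrative sketch (not part of the generated module): pairing the
# --- HttpLinkedService and HttpServerLocation models above. The URL and relative
# --- path are hypothetical placeholders.
from azure.synapse.artifacts.models import HttpLinkedService, HttpServerLocation

http_ls = HttpLinkedService(
    url="https://example.com/data",                # required base URL
    authentication_type="Anonymous",
    enable_server_certificate_validation=True,
)
csv_location = HttpServerLocation(relative_url="reports/latest.csv")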
Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HttpSource, self).__init__(**kwargs) + self.type = 'HttpSource' # type: str + self.http_request_timeout = kwargs.get('http_request_timeout', None) + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_id: Required. The client ID associated with your Hubspot application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot application. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param access_token: The access token obtained when initially authenticating your OAuth + integration. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param refresh_token: The refresh token obtained when initially authenticating your OAuth + integration. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HubspotLinkedService, self).__init__(**kwargs) + self.type = 'Hubspot' # type: str + self.client_id = kwargs['client_id'] + self.client_secret = kwargs.get('client_secret', None) + self.access_token = kwargs.get('access_token', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HubspotObjectDataset, self).__init__(**kwargs) + self.type = 'HubspotObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class HubspotSource(TabularSource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HubspotSource, self).__init__(**kwargs) + self.type = 'HubspotSource' # type: str + self.query = kwargs.get('query', None) + + +class IfConditionActivity(Activity): + """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. 
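# --- Editor's illustrative sketch (not part of the generated module): the Hubspot
# --- linked service defined above with its single required type property. The
# --- client id and secret values are hypothetical placeholders.
from azure.synapse.artifacts.models import HubspotLinkedService, SecureString

hubspot_ls = HubspotLinkedService(
    client_id="<app-client-id>",                   # required
    client_secret=SecureString(value="<placeholder>"),
    use_encrypted_endpoints=True,
)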
+ :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param expression: Required. An expression that would evaluate to Boolean. This is used to + determine the block of activities (ifTrueActivities or ifFalseActivities) that will be + executed. + :type expression: ~azure.synapse.artifacts.models.Expression + :param if_true_activities: List of activities to execute if expression is evaluated to true. + This is an optional property and if not provided, the activity will exit without any action. + :type if_true_activities: list[~azure.synapse.artifacts.models.Activity] + :param if_false_activities: List of activities to execute if expression is evaluated to false. + This is an optional property and if not provided, the activity will exit without any action. + :type if_false_activities: list[~azure.synapse.artifacts.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, + 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + } + + def __init__( + self, + **kwargs + ): + super(IfConditionActivity, self).__init__(**kwargs) + self.type = 'IfCondition' # type: str + self.expression = kwargs['expression'] + self.if_true_activities = kwargs.get('if_true_activities', None) + self.if_false_activities = kwargs.get('if_false_activities', None) + + +class ImpalaLinkedService(LinkedService): + """Impala server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the Impala server. (i.e. + 192.168.222.160). + :type host: object + :param port: The TCP port that the Impala server uses to listen for client connections. The + default value is 21050. + :type port: object + :param authentication_type: Required. The authentication type to use. Possible values include: + "Anonymous", "SASLUsername", "UsernameAndPassword". 
+    :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType
+    :param username: The user name used to access the Impala server. The default value is anonymous
+     when using SASLUsername.
+    :type username: object
+    :param password: The password corresponding to the user name when using UsernameAndPassword.
+    :type password: ~azure.synapse.artifacts.models.SecretBase
+    :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
+     default value is false.
+    :type enable_ssl: object
+    :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for
+     verifying the server when connecting over SSL. This property can only be set when using SSL on
+     self-hosted IR. The default value is the cacerts.pem file installed with the IR.
+    :type trusted_cert_path: object
+    :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust
+     store or from a specified PEM file. The default value is false.
+    :type use_system_trust_store: object
+    :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate
+     name to match the host name of the server when connecting over SSL. The default value is false.
+    :type allow_host_name_cn_mismatch: object
+    :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from
+     the server. The default value is false.
+    :type allow_self_signed_server_cert: object
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+        'authentication_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
+        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
+        'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
+        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ImpalaLinkedService, self).__init__(**kwargs)
+        self.type = 'Impala'  # type: str
+        self.host = kwargs['host']
+        self.port = kwargs.get('port', None)
+        self.authentication_type = kwargs['authentication_type']
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.enable_ssl = kwargs.get('enable_ssl', None)
+        self.trusted_cert_path = kwargs.get('trusted_cert_path', None)
+        self.use_system_trust_store = kwargs.get('use_system_trust_store', None)
+        self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None)
+        self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class ImpalaObjectDataset(Dataset):
+    """Impala server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset.Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+     with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+     Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+     root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param table_name: This property will be retired. Please consider using schema + table
+     properties instead.
+    :type table_name: object
+    :param table: The table name of the Impala. Type: string (or Expression with resultType
+     string).
+    :type table: object
+    :param schema_type_properties_schema: The schema name of the Impala. Type: string (or
+     Expression with resultType string).
+    :type schema_type_properties_schema: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+        'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ImpalaObjectDataset, self).__init__(**kwargs)
+        self.type = 'ImpalaObject'  # type: str
+        self.table_name = kwargs.get('table_name', None)
+        self.table = kwargs.get('table', None)
+        self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None)
+
+
+class ImpalaSource(TabularSource):
+    """A copy activity Impala server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type.Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: A query to retrieve data from source. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ImpalaSource, self).__init__(**kwargs)
+        self.type = 'ImpalaSource'  # type: str
+        self.query = kwargs.get('query', None)
+
+
+class InformixLinkedService(LinkedService):
+    """Informix linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service.Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param connection_string: Required. The non-access credential portion of the connection string
+     as well as an optional encrypted credential. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param authentication_type: Type of authentication used to connect to the Informix as ODBC data
+     store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType
+     string).
+    :type authentication_type: object
+    :param credential: The access credential portion of the connection string specified in driver-
+     specific property-value format.
+    :type credential: ~azure.synapse.artifacts.models.SecretBase
+    :param user_name: User name for Basic authentication. Type: string (or Expression with
+     resultType string).
+    :type user_name: object
+    :param password: Password for Basic authentication.
+    :type password: ~azure.synapse.artifacts.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
+        'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(InformixLinkedService, self).__init__(**kwargs)
+        self.type = 'Informix'  # type: str
+        self.connection_string = kwargs['connection_string']
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.credential = kwargs.get('credential', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class InformixSink(CopySink):
+    """A copy activity Informix sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy sink type.Constant filled by server.
+    :type type: str
+    :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+     integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+     integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param pre_copy_script: A query to execute before starting the copy. Type: string (or
+     Expression with resultType string).
+    :type pre_copy_script: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(InformixSink, self).__init__(**kwargs)
+        self.type = 'InformixSink'  # type: str
+        self.pre_copy_script = kwargs.get('pre_copy_script', None)
+
+
+class InformixSource(TabularSource):
+    """A copy activity source for Informix.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type.Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: Database query. Type: string (or Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(InformixSource, self).__init__(**kwargs)
+        self.type = 'InformixSource'  # type: str
+        self.query = kwargs.get('query', None)
+
+
+class InformixTableDataset(Dataset):
+    """The Informix table dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset.Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+     with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+     Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+     root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param table_name: The Informix table name. Type: string (or Expression with resultType
+     string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(InformixTableDataset, self).__init__(**kwargs)
+        self.type = 'InformixTable'  # type: str
+        self.table_name = kwargs.get('table_name', None)
+
+
+class IntegrationRuntime(msrest.serialization.Model):
+    """Azure Synapse nested object which serves as a compute resource for activities.
+
+    You probably want to use the sub-classes and not this class directly. Known
+     sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of integration runtime.Constant filled by server. Possible values
+     include: "Managed", "SelfHosted".
+    :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType
+    :param description: Integration runtime description.
+    :type description: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntime, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.type = 'IntegrationRuntime'  # type: str
+        self.description = kwargs.get('description', None)
+
+
+class IntegrationRuntimeComputeProperties(msrest.serialization.Model):
+    """The compute resource properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+ :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. The supported regions could be + found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement- + activities. + :type location: str + :param node_size: The node size requirement to managed integration runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed + integration runtime. + :type max_parallel_executions_per_node: int + :param data_flow_properties: Data flow properties for managed integration runtime. + :type data_flow_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeDataFlowProperties + :param v_net_properties: VNet properties for managed integration runtime. + :type v_net_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.location = kwargs.get('location', None) + self.node_size = kwargs.get('node_size', None) + self.number_of_nodes = kwargs.get('number_of_nodes', None) + self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) + self.data_flow_properties = kwargs.get('data_flow_properties', None) + self.v_net_properties = kwargs.get('v_net_properties', None) + + +class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that contains the custom setup + script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.synapse.artifacts.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = kwargs.get('blob_container_uri', None) + self.sas_token = kwargs.get('sas_token', None) + + +class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): + """Data flow properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". 
+ :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. + :type core_count: int + :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data + flow job. + :type time_to_live: int + """ + + _validation = { + 'time_to_live': {'minimum': 0}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.compute_type = kwargs.get('compute_type', None) + self.core_count = kwargs.get('core_count', None) + self.time_to_live = kwargs.get('time_to_live', None) + + +class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: ~azure.synapse.artifacts.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) + + +class IntegrationRuntimeListResponse(msrest.serialization.Model): + """A list of integration runtime resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtimes. + :type value: list[~azure.synapse.artifacts.models.IntegrationRuntimeResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class IntegrationRuntimeReference(msrest.serialization.Model): + """Integration runtime reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of integration runtime. Possible values include: + "IntegrationRuntimeReference". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. 
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class IntegrationRuntimeResource(AzureEntityResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.synapse.artifacts.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user account of the catalog + database. + :type catalog_admin_password: ~azure.synapse.artifacts.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values + include: "Basic", "Standard", "Premium", "PremiumRS". 
+ :type catalog_pricing_tier: str or + ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) + self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) + self.catalog_admin_password = kwargs.get('catalog_admin_password', None) + self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + + +class IntegrationRuntimeSsisProperties(msrest.serialization.Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration runtime. + :type catalog_info: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :type license_type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :type custom_setup_script_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :type data_proxy_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". + :type edition: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEdition + :param express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. 
+ :type express_custom_setup_properties: list[~azure.synapse.artifacts.models.CustomSetupBase] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, + 'edition': {'key': 'edition', 'type': 'str'}, + 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_info = kwargs.get('catalog_info', None) + self.license_type = kwargs.get('license_type', None) + self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) + self.edition = kwargs.get('edition', None) + self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) + + +class IntegrationRuntimeVNetProperties(msrest.serialization.Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will + use. + :type public_i_ps: list[str] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.v_net_id = kwargs.get('v_net_id', None) + self.subnet = kwargs.get('subnet', None) + self.public_i_ps = kwargs.get('public_i_ps', None) + + +class JiraLinkedService(LinkedService): + """Jira Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the Jira service. (e.g. + jira.example.com). 
+ :type host: object + :param port: The TCP port that the Jira server uses to listen for client connections. The + default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira Service. + :type username: object + :param password: The password corresponding to the user name that you provided in the username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JiraLinkedService, self).__init__(**kwargs) + self.type = 'Jira' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.username = kwargs['username'] + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class JiraObjectDataset(Dataset): + """Jira Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JiraObjectDataset, self).__init__(**kwargs) + self.type = 'JiraObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class JiraSource(TabularSource): + """A copy activity Jira Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JiraSource, self).__init__(**kwargs) + self.type = 'JiraSource' # type: str + self.query = kwargs.get('query', None) + + +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the json data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonDataset, self).__init__(**kwargs) + self.type = 'Json' # type: str + self.location = kwargs.get('location', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression = kwargs.get('compression', None) + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + :param file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". + :type file_pattern: str or ~azure.synapse.artifacts.models.JsonFormatFilePattern + :param nesting_separator: The character used to separate nesting levels. Default value is '.' + (dot). Type: string (or Expression with resultType string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not provided, the + default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. + The full list of supported values can be found in the 'Name' column of the table of encodings + in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). + :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column mapping with a customized + column name to extract data from JSON file. For fields under root object, start with "$"; for + fields inside the array chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or + Expression with resultType object). 
+ :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonFormat, self).__init__(**kwargs) + self.type = 'JsonFormat' # type: str + self.file_pattern = kwargs.get('file_pattern', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.json_node_reference = kwargs.get('json_node_reference', None) + self.json_path_definition = kwargs.get('json_path_definition', None) + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Json format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonSink, self).__init__(**kwargs) + self.type = 'JsonSink' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonSource, self).__init__(**kwargs) + self.type = 'JsonSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". 
+ :type file_pattern: str or ~azure.synapse.artifacts.models.JsonWriteFilePattern + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonWriteSettings, self).__init__(**kwargs) + self.type = 'JsonWriteSettings' # type: str + self.file_pattern = kwargs.get('file_pattern', None) + + +class LibraryRequirements(msrest.serialization.Model): + """Library requirements for a Big Data pool powered by Apache Spark. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar time: The last update time of the library requirements file. + :vartype time: ~datetime.datetime + :param content: The library requirements. + :type content: str + :param filename: The filename of the library requirements file. + :type filename: str + """ + + _validation = { + 'time': {'readonly': True}, + } + + _attribute_map = { + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'content': {'key': 'content', 'type': 'str'}, + 'filename': {'key': 'filename', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LibraryRequirements, self).__init__(**kwargs) + self.time = None + self.content = kwargs.get('content', None) + self.filename = kwargs.get('filename', None) + + +class LinkedIntegrationRuntimeType(msrest.serialization.Model): + """The base definition of a linked integration runtime. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + """ + + _validation = { + 'authorization_type': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + } + + _subtype_map = { + 'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'} + } + + def __init__( + self, + **kwargs + ): + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + self.authorization_type = None # type: Optional[str] + + +class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): + """The key authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param key: Required. The key used for authorization. + :type key: ~azure.synapse.artifacts.models.SecureString + """ + + _validation = { + 'authorization_type': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'SecureString'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.authorization_type = 'Key' # type: str + self.key = kwargs['key'] + + +class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): + """The role based access control (RBAC) authorization type integration runtime. 
+ + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration runtime to be shared. + :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.authorization_type = 'RBAC' # type: str + self.resource_id = kwargs['resource_id'] + + +class LinkedServiceDebugResource(SubResourceDebugResource): + """Linked service debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceDebugResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class LinkedServiceListResponse(msrest.serialization.Model): + """A list of linked service resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of linked services. + :type value: list[~azure.synapse.artifacts.models.LinkedServiceResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class LinkedServiceReference(msrest.serialization.Model): + """Linked service reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Linked service reference type. Possible values include: + "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.Type + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class LinkedServiceResource(AzureEntityResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class LogStorageSettings(msrest.serialization.Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs['linked_service_name'] + self.path = kwargs.get('path', None) + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy activity source. 
+ :type source: ~azure.synapse.artifacts.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default value is true. Type: + boolean (or Expression with resultType boolean). + :type first_row_only: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LookupActivity, self).__init__(**kwargs) + self.type = 'Lookup' # type: str + self.source = kwargs['source'] + self.dataset = kwargs['dataset'] + self.first_row_only = kwargs.get('first_row_only', None) -class HDInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. +class MagentoLinkedService(LinkedService): + """Magento server linked service. All required parameters must be populated in order to send to Azure. @@ -9097,466 +17365,470 @@ class HDInsightOnDemandLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. - Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. - Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity - run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string - (or Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand - cluster for storing and processing data. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: - string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal belongs. 
Type: - string (or Expression with resultType string). - :type tenant: object - :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: - string (or Expression with resultType string). - :type cluster_resource_group: object - :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with - timestamp. Type: string (or Expression with resultType string). - :type cluster_name_prefix: object - :param cluster_user_name: The username to access the cluster. Type: string (or Expression with - resultType string). - :type cluster_user_name: object - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.synapse.artifacts.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for - Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: object - :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). - :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase - :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight - linked service so that the Data Factory service can register them on your behalf. - :type additional_linked_service_names: - list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the - HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database - as the metastore. - :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: object - :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or - Expression with resultType string). - :type spark_version: object - :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) - for the HDInsight cluster to be created. - :type core_configuration: object - :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for - the HDInsight cluster. - :type h_base_configuration: object - :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - HDInsight cluster. - :type hdfs_configuration: object - :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the - HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- - site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object - :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for - the HDInsight cluster. - :type oozie_configuration: object - :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for - the HDInsight cluster. - :type storm_configuration: object - :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the - HDInsight cluster. - :type yarn_configuration: object + :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). + :type host: object + :param access_token: The access token from Magento. 
+ :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object - :param head_node_size: Specifies the size of the head node for the HDInsight cluster. - :type head_node_size: object - :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: object - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight - cluster. - :type zookeeper_node_size: object - :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- - cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- - us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be - joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was - specified, then this property is required. Type: string (or Expression with resultType string). 
- :type subnet_name: object """ _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MagentoLinkedService, self).__init__(**kwargs) + self.type = 'Magento' # type: str + self.host = kwargs['host'] + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MagentoObjectDataset, self).__init__(**kwargs) + self.type = 'MagentoObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MagentoSource(TabularSource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MagentoSource, self).__init__(**kwargs) + self.type = 'MagentoSource' # type: str + self.query = kwargs.get('query', None) + + +class ManagedIdentity(msrest.serialization.Model): + """The workspace managed identity. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the workspace managed identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the workspace managed identity. + :vartype tenant_id: str + :param type: The type of managed identity for the workspace. Possible values include: "None", + "SystemAssigned". 
+ :type type: str or ~azure.synapse.artifacts.models.ResourceIdentityType + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 
'type': 'object'}, } def __init__( self, **kwargs ): - super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) - self.type = 'HDInsightOnDemand' # type: str - self.cluster_size = kwargs['cluster_size'] - self.time_to_live = kwargs['time_to_live'] - self.version = kwargs['version'] - self.linked_service_name = kwargs['linked_service_name'] - self.host_subscription_id = kwargs['host_subscription_id'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs['tenant'] - self.cluster_resource_group = kwargs['cluster_resource_group'] - self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) - self.cluster_user_name = kwargs.get('cluster_user_name', None) - self.cluster_password = kwargs.get('cluster_password', None) - self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) - self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) - self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.cluster_type = kwargs.get('cluster_type', None) - self.spark_version = kwargs.get('spark_version', None) - self.core_configuration = kwargs.get('core_configuration', None) - self.h_base_configuration = kwargs.get('h_base_configuration', None) - self.hdfs_configuration = kwargs.get('hdfs_configuration', None) - self.hive_configuration = kwargs.get('hive_configuration', None) - self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) - self.oozie_configuration = kwargs.get('oozie_configuration', None) - self.storm_configuration = kwargs.get('storm_configuration', None) - self.yarn_configuration = kwargs.get('yarn_configuration', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.head_node_size = kwargs.get('head_node_size', None) - self.data_node_size = kwargs.get('data_node_size', None) - self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) - self.script_actions = kwargs.get('script_actions', None) - self.virtual_network_id = kwargs.get('virtual_network_id', None) - self.subnet_name = kwargs.get('subnet_name', None) + super(ManagedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = kwargs.get('type', None) -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression - with resultType array). - :type arguments: object - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] + :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. + Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", + "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". + :vartype state: str or ~azure.synapse.artifacts.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration runtime. + :type compute_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. 
+ :type ssis_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisProperties """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, + 'state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } def __init__( self, **kwargs ): - super(HDInsightPigActivity, self).__init__(**kwargs) - self.type = 'HDInsightPig' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) + super(ManagedIntegrationRuntime, self).__init__(**kwargs) + self.type = 'Managed' # type: str + self.state = None + self.compute_properties = kwargs.get('compute_properties', None) + self.ssis_properties = kwargs.get('ssis_properties', None) + + +class MappingDataFlow(DataFlow): + """Mapping data flow. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. + :type type: str + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data flow. + :type annotations: list[object] + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. + :type folder: ~azure.synapse.artifacts.models.DataFlowFolder + :param sources: List of sources in data flow. + :type sources: list[~azure.synapse.artifacts.models.DataFlowSource] + :param sinks: List of sinks in data flow. + :type sinks: list[~azure.synapse.artifacts.models.DataFlowSink] + :param transformations: List of transformations in data flow. + :type transformations: list[~azure.synapse.artifacts.models.Transformation] + :param script: DataFlow script. 
+ :type script: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, + 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, + 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, + 'script': {'key': 'typeProperties.script', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(MappingDataFlow, self).__init__(**kwargs) + self.type = 'MappingDataFlow' # type: str + self.sources = kwargs.get('sources', None) + self.sinks = kwargs.get('sinks', None) + self.transformations = kwargs.get('transformations', None) + self.script = kwargs.get('script', None) + + +class MariaDBLinkedService(LinkedService): + """MariaDB server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + def __init__( + self, + **kwargs + ): + super(MariaDBLinkedService, self).__init__(**kwargs) + self.type = 'MariaDB' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. 
+ +class MariaDBSource(TabularSource): + """A copy activity MariaDB server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. - Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading the entry file and - dependencies, and for receiving logs. - :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. Type: string (or - Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HDInsightSparkActivity, self).__init__(**kwargs) - self.type = 'HDInsightSpark' # type: str - self.root_path = kwargs['root_path'] - self.entry_file_path = kwargs['entry_file_path'] - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) - self.class_name = kwargs.get('class_name', None) - self.proxy_user = kwargs.get('proxy_user', None) - self.spark_config = kwargs.get('spark_config', None) + super(MariaDBSource, self).__init__(**kwargs) + self.type = 'MariaDBSource' # type: str + self.query = kwargs.get('query', None) -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType - string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType - string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HDInsightStreamingActivity, self).__init__(**kwargs) - self.type = 'HDInsightStreaming' # type: str - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.mapper = kwargs['mapper'] - self.reducer = kwargs['reducer'] - self.input = kwargs['input'] - self.output = kwargs['output'] - self.file_paths = kwargs['file_paths'] - self.file_linked_service = kwargs.get('file_linked_service', None) - self.combiner = kwargs.get('combiner', None) - self.command_environment = kwargs.get('command_environment', None) - self.defines = kwargs.get('defines', None) + super(MariaDBTableDataset, self).__init__(**kwargs) + self.type = 'MariaDBTable' # type: str + self.table_name = kwargs.get('table_name', None) -class HiveLinkedService(LinkedService): - """Hive Server linked service. +class MarketoLinkedService(LinkedService): + """Marketo server linked service. All required parameters must be populated in order to send to Azure. @@ -9573,53 +17845,22 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. 
IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: "HiveServer1", - "HiveServer2", "HiveThriftServer". - :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.synapse.artifacts.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Hive server. - Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are - added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts - them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object + :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). + :type endpoint: object + :param client_id: Required. The client Id of your Marketo service. + :type client_id: object + :param client_secret: The client secret of your Marketo service. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. 
+ :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -9628,8 +17869,8 @@ class HiveLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -9639,22 +17880,12 @@ class HiveLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9662,29 +17893,19 @@ def __init__( self, **kwargs ): - super(HiveLinkedService, self).__init__(**kwargs) - self.type = 'Hive' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs['authentication_type'] - self.service_discovery_mode = kwargs.get('service_discovery_mode', 
None) - self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) - self.use_native_query = kwargs.get('use_native_query', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + super(MarketoLinkedService, self).__init__(**kwargs) + self.type = 'Marketo' # type: str + self.endpoint = kwargs['endpoint'] + self.client_id = kwargs['client_id'] + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class HiveObjectDataset(Dataset): - """Hive Server dataset. +class MarketoObjectDataset(Dataset): + """Marketo server dataset. All required parameters must be populated in order to send to Azure. @@ -9710,14 +17931,8 @@ class HiveObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -9736,23 +17951,69 @@ class HiveObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HiveObjectDataset, self).__init__(**kwargs) - self.type = 'HiveObject' # type: str + super(MarketoObjectDataset, self).__init__(**kwargs) + self.type = 'MarketoObject' # type: str self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. +class MarketoSource(TabularSource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MarketoSource, self).__init__(**kwargs) + self.type = 'MarketoSource' # type: str + self.query = kwargs.get('query', None) + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. All required parameters must be populated in order to send to Azure. @@ -9769,40 +18030,31 @@ class HttpLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: - string (or Expression with resultType string). - :type url: object - :param authentication_type: The authentication type to be used to connect to the HTTP server. - Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". - :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or - Expression with resultType string). + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the Microsoft Access as + ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~azure.synapse.artifacts.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate with - EmbeddedCertData authentication. + :param password: Password for Basic authentication. 
:type password: ~azure.synapse.artifacts.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate - authentication. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only - valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS server SSL - certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { @@ -9812,116 +18064,188 @@ class HttpLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HttpLinkedService, self).__init__(**kwargs) - self.type = 'HttpServer' # type: str - self.url = kwargs['url'] + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.type = 'MicrosoftAccess' # type: str + self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) - self.embedded_cert_data = kwargs.get('embedded_cert_data', None) - self.cert_thumbprint = kwargs.get('cert_thumbprint', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. 
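A hedged sketch of how the new MicrosoftAccessLinkedService could be populated; the DSN, user name, and secret value below are placeholders, and SecureString is assumed to be the concrete SecretBase implementation exposed by this models package.

from azure.synapse.artifacts.models import MicrosoftAccessLinkedService, SecureString

access_ls = MicrosoftAccessLinkedService(
    connection_string="DSN=ContosoAccessDsn",       # required per the _validation map
    authentication_type="Basic",                    # optional: Anonymous or Basic
    user_name="contoso_user",                       # placeholder user name
    password=SecureString(value="<placeholder>"),   # assumed SecretBase implementation
)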
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.type = 'MicrosoftAccessSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.type = 'MicrosoftAccessSource' # type: str + self.query = kwargs.get('query', None) + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param access_token: The access token obtained when initially authenticating your OAuth - integration. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param refresh_token: The refresh token obtained when initially authenticating your OAuth - integration. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. 
- :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). + :type table_name: object """ _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HubspotLinkedService, self).__init__(**kwargs) - self.type = 'Hubspot' # type: str - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.access_token = kwargs.get('access_token', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = kwargs.get('table_name', None) -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -9947,13 +18271,15 @@ class HubspotObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param collection_name: Required. The table name of the MongoDB database. 
Type: string (or + Expression with resultType string). + :type collection_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, } _attribute_map = { @@ -9966,79 +18292,63 @@ class HubspotObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HubspotObjectDataset, self).__init__(**kwargs) - self.type = 'HubspotObject' # type: str - self.table_name = kwargs.get('table_name', None) - + super(MongoDbCollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbCollection' # type: str + self.collection_name = kwargs['collection_name'] -class IfConditionActivity(Activity): - """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. - All required parameters must be populated in order to send to Azure. +class MongoDbCursorMethodsProperties(msrest.serialization.Model): + """Cursor methods for Mongodb query. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. This is used to - determine the block of activities (ifTrueActivities or ifFalseActivities) that will be - executed. - :type expression: ~azure.synapse.artifacts.models.Expression - :param if_true_activities: List of activities to execute if expression is evaluated to true. - This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~azure.synapse.artifacts.models.Activity] - :param if_false_activities: List of activities to execute if expression is evaluated to false. - This is an optional property and if not provided, the activity will exit without any action. - :type if_false_activities: list[~azure.synapse.artifacts.models.Activity] + :param project: Specifies the fields to return in the documents that match the query filter. To + return all fields in the matching documents, omit this parameter. Type: string (or Expression + with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching documents. Type: string + (or Expression with resultType string). Type: string (or Expression with resultType string). + :type sort: object + :param skip: Specifies the how many documents skipped and where MongoDB begins returning + results. This approach may be useful in implementing paginated results. Type: integer (or + Expression with resultType integer). 
+ :type skip: object + :param limit: Specifies the maximum number of documents the server returns. limit() is + analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with + resultType integer). + :type limit: object """ - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, } def __init__( self, **kwargs ): - super(IfConditionActivity, self).__init__(**kwargs) - self.type = 'IfCondition' # type: str - self.expression = kwargs['expression'] - self.if_true_activities = kwargs.get('if_true_activities', None) - self.if_false_activities = kwargs.get('if_false_activities', None) + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.project = kwargs.get('project', None) + self.sort = kwargs.get('sort', None) + self.skip = kwargs.get('skip', None) + self.limit = kwargs.get('limit', None) -class ImpalaLinkedService(LinkedService): - """Impala server linked service. +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. All required parameters must be populated in order to send to Azure. @@ -10055,35 +18365,31 @@ class ImpalaLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Impala server uses to listen for client connections. The - default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The default value is anonymous - when using SASLUsername. + :param server: Required. The IP address or server name of the MongoDB server. Type: string (or + Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect to the MongoDB + database. Possible values include: "Basic", "Anonymous". + :type authentication_type: str or ~azure.synapse.artifacts.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you want to access. Type: + string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. 
Type: string (or Expression with resultType + string). :type username: object - :param password: The password corresponding to the user name when using UsernameAndPassword. + :param password: Password for authentication. :type password: ~azure.synapse.artifacts.models.SecretBase + :param auth_source: Database to verify the username and password. Type: string (or Expression + with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen for client connections. + The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. + default value is false. Type: boolean (or Expression with resultType boolean). :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. + the server. The default value is false. Type: boolean (or Expression with resultType boolean). :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with @@ -10093,8 +18399,8 @@ class ImpalaLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, } _attribute_map = { @@ -10104,15 +18410,14 @@ class ImpalaLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -10121,23 +18426,68 @@ def __init__( self, **kwargs ): - super(ImpalaLinkedService, self).__init__(**kwargs) - self.type = 'Impala' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(MongoDbLinkedService, self).__init__(**kwargs) + self.type = 'MongoDb' # type: str + self.server = kwargs['server'] + self.authentication_type = kwargs.get('authentication_type', None) + self.database_name = kwargs['database_name'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.auth_source = kwargs.get('auth_source', None) + self.port = kwargs.get('port', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
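For the classic MongoDB linked service defined above, a minimal sketch with placeholder connection details could be:

from azure.synapse.artifacts.models import MongoDbLinkedService

mongo_ls = MongoDbLinkedService(
    server="mongo01.contoso.local",   # required: IP address or server name
    database_name="telemetry",        # required: database to access
    authentication_type="Basic",      # optional enum: Basic or Anonymous
    username="reader",                # placeholder user name
    port=27017,                       # optional; 27017 is the documented default
    enable_ssl=True,                  # optional: encrypt connections with SSL
)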
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbSource, self).__init__(**kwargs) + self.type = 'MongoDbSource' # type: str + self.query = kwargs.get('query', None) -class ImpalaObjectDataset(Dataset): - """Impala server dataset. +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -10163,20 +18513,15 @@ class ImpalaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Impala. Type: string (or + :param collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). 
- :type schema_type_properties_schema: object + :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -10189,24 +18534,20 @@ class ImpalaObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ImpalaObjectDataset, self).__init__(**kwargs) - self.type = 'ImpalaObject' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbV2Collection' # type: str + self.collection = kwargs['collection'] -class InformixLinkedService(LinkedService): - """Informix linked service. +class MongoDbV2LinkedService(LinkedService): + """Linked service for MongoDB data source. All required parameters must be populated in order to send to Azure. @@ -10223,31 +18564,18 @@ class InformixLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. + :param connection_string: Required. The MongoDB connection string. Type: string, SecureString + or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Informix as ODBC data - store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType - string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param database: Required. The name of the MongoDB database that you want to access. Type: + string (or Expression with resultType string). 
+ :type database: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, + 'database': {'required': True}, } _attribute_map = { @@ -10258,123 +18586,84 @@ class InformixLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, **kwargs ): - super(InformixLinkedService, self).__init__(**kwargs) - self.type = 'Informix' # type: str + super(MongoDbV2LinkedService, self).__init__(**kwargs) + self.type = 'MongoDbV2' # type: str self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.database = kwargs['database'] -class InformixTableDataset(Dataset): - """The Informix table dataset. +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Informix table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. This property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(InformixTableDataset, self).__init__(**kwargs) - self.type = 'InformixTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class IntegrationRuntimeReference(msrest.serialization.Model): - """Integration runtime reference type. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of integration runtime. Possible values include: - "IntegrationRuntimeReference". - :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. 
- :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__( self, **kwargs ): - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.type = kwargs['type'] - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) + super(MongoDbV2Source, self).__init__(**kwargs) + self.type = 'MongoDbV2Source' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) -class JiraLinkedService(LinkedService): - """Jira Service linked service. +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -10391,27 +18680,10 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The IP address or host name of the Jira service. (e.g. - jira.example.com). - :type host: object - :param port: The TCP port that the Jira server uses to listen for client connections. The - default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param connection_string: Required. The connection string. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -10420,8 +18692,7 @@ class JiraLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { @@ -10431,13 +18702,8 @@ class JiraLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -10445,20 +18711,64 @@ def __init__( self, **kwargs ): - super(JiraLinkedService, self).__init__(**kwargs) - self.type = 'Jira' # type: str - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.username = kwargs['username'] + super(MySqlLinkedService, self).__init__(**kwargs) + self.type = 'MySql' # type: str + self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class JiraObjectDataset(Dataset): - """Jira Service dataset. +class MySqlSource(TabularSource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
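A minimal sketch for the regenerated MySQL linked service; the connection string and Key Vault names are placeholders, and the AzureKeyVaultSecretReference fields (store, secret_name) are assumed from the reference type named in the docstring above.

from azure.synapse.artifacts.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    MySqlLinkedService,
)

mysql_ls = MySqlLinkedService(
    connection_string="Server=mysql01;Port=3306;Database=sales;UID=loader;",  # required; placeholder
    password=AzureKeyVaultSecretReference(       # optional Key Vault reference for the password
        store=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="akv_ls",             # hypothetical Key Vault linked service name
        ),
        secret_name="mysql-password",            # hypothetical secret name
    ),
)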
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MySqlSource, self).__init__(**kwargs) + self.type = 'MySqlSource' # type: str + self.query = kwargs.get('query', None) + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. All required parameters must be populated in order to send to Azure. @@ -10484,7 +18794,7 @@ class JiraObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: The MySQL table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -10510,611 +18820,599 @@ def __init__( self, **kwargs ): - super(JiraObjectDataset, self).__init__(**kwargs) - self.type = 'JiraObject' # type: str + super(MySqlTableDataset, self).__init__(**kwargs) + self.type = 'MySqlTable' # type: str self.table_name = kwargs.get('table_name', None) -class JsonDataset(Dataset): - """Json dataset. +class NetezzaLinkedService(LinkedService): + """Netezza linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the json data storage. 
- :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the - default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column - of the table in the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(JsonDataset, self).__init__(**kwargs) - self.type = 'Json' # type: str - self.location = kwargs.get('location', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression = kwargs.get('compression', None) - + super(NetezzaLinkedService, self).__init__(**kwargs) + self.type = 'Netezza' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class LinkedServiceDebugResource(SubResourceDebugResource): - """Linked service debug resource. - All required parameters must be populated in order to send to Azure. +class NetezzaPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Netezza source partitioning. - :param name: The resource name. - :type name: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService + :param partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. 
Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object """ - _validation = { - 'properties': {'required': True}, - } - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } def __init__( self, **kwargs ): - super(LinkedServiceDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) -class LinkedServiceListResponse(msrest.serialization.Model): - """A list of linked service resources. +class NetezzaSource(TabularSource): + """A copy activity Netezza source. All required parameters must be populated in order to send to Azure. - :param value: Required. List of linked services. - :type value: list[~azure.synapse.artifacts.models.LinkedServiceResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for Netezza read in + parallel. Possible values include: "None", "DataSlice", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.NetezzaPartitionSettings """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } def __init__( self, **kwargs ): - super(LinkedServiceListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) + super(NetezzaSource, self).__init__(**kwargs) + self.type = 'NetezzaSource' # type: str + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) -class LinkedServiceReference(msrest.serialization.Model): - """Linked service reference type. +class NetezzaTableDataset(Dataset): + """Netezza dataset. All required parameters must be populated in order to send to Azure. - :param type: Required. Linked service reference type. Possible values include: - "LinkedServiceReference". - :type type: str or ~azure.synapse.artifacts.models.Type - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Netezza. Type: string (or Expression with resultType + string). + :type table: object + :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or + Expression with resultType string). 
+ :type schema_type_properties_schema: object """ _validation = { 'type': {'required': True}, - 'reference_name': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(LinkedServiceReference, self).__init__(**kwargs) - self.type = kwargs['type'] - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) - + super(NetezzaTableDataset, self).__init__(**kwargs) + self.type = 'NetezzaTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class LinkedServiceResource(SubResource): - """Linked service resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class Notebook(msrest.serialization.Model): + """Notebook. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param description: The description of the notebook. + :type description: str + :param big_data_pool: Big data pool reference. + :type big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference + :param session_properties: Session properties. + :type session_properties: ~azure.synapse.artifacts.models.NotebookSessionProperties + :param metadata: Required. Notebook root-level metadata. + :type metadata: ~azure.synapse.artifacts.models.NotebookMetadata + :param nbformat: Required. Notebook format (major number). Incremented between backwards + incompatible changes to the notebook format. + :type nbformat: int + :param nbformat_minor: Required. Notebook format (minor number). Incremented for backward + compatible changes to the notebook format. + :type nbformat_minor: int + :param cells: Required. Array of cells of the current notebook. 
+ :type cells: list[~azure.synapse.artifacts.models.NotebookCell] """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'metadata': {'required': True}, + 'nbformat': {'required': True}, + 'nbformat_minor': {'required': True}, + 'cells': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'big_data_pool': {'key': 'bigDataPool', 'type': 'BigDataPoolReference'}, + 'session_properties': {'key': 'sessionProperties', 'type': 'NotebookSessionProperties'}, + 'metadata': {'key': 'metadata', 'type': 'NotebookMetadata'}, + 'nbformat': {'key': 'nbformat', 'type': 'int'}, + 'nbformat_minor': {'key': 'nbformat_minor', 'type': 'int'}, + 'cells': {'key': 'cells', 'type': '[NotebookCell]'}, } def __init__( self, **kwargs ): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(Notebook, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.big_data_pool = kwargs.get('big_data_pool', None) + self.session_properties = kwargs.get('session_properties', None) + self.metadata = kwargs['metadata'] + self.nbformat = kwargs['nbformat'] + self.nbformat_minor = kwargs['nbformat_minor'] + self.cells = kwargs['cells'] -class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. +class NotebookCell(msrest.serialization.Model): + """Notebook cell. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string - (or Expression with resultType string). - :type path: object + :param cell_type: Required. String identifying the type of cell. + :type cell_type: str + :param metadata: Required. Cell-level metadata. + :type metadata: object + :param source: Required. Contents of the cell, represented as an array of lines. + :type source: list[str] + :param attachments: Attachments associated with the cell. + :type attachments: object + :param outputs: Cell-level output items. 
+ :type outputs: list[~azure.synapse.artifacts.models.NotebookCellOutputItem] """ _validation = { - 'linked_service_name': {'required': True}, + 'cell_type': {'required': True}, + 'metadata': {'required': True}, + 'source': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, + 'cell_type': {'key': 'cell_type', 'type': 'str'}, + 'metadata': {'key': 'metadata', 'type': 'object'}, + 'source': {'key': 'source', 'type': '[str]'}, + 'attachments': {'key': 'attachments', 'type': 'object'}, + 'outputs': {'key': 'outputs', 'type': '[NotebookCellOutputItem]'}, } def __init__( self, **kwargs ): - super(LogStorageSettings, self).__init__(**kwargs) + super(NotebookCell, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs['linked_service_name'] - self.path = kwargs.get('path', None) + self.cell_type = kwargs['cell_type'] + self.metadata = kwargs['metadata'] + self.source = kwargs['source'] + self.attachments = kwargs.get('attachments', None) + self.outputs = kwargs.get('outputs', None) -class LookupActivity(ExecutionActivity): - """Lookup activity. +class NotebookCellOutputItem(msrest.serialization.Model): + """An item of the notebook cell execution output. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: For output_type=stream, determines the name of stream (stdout / stderr). :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~azure.synapse.artifacts.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default value is true. Type: - boolean (or Expression with resultType boolean). - :type first_row_only: object + :param execution_count: Execution sequence number. + :type execution_count: int + :param output_type: Required. Execution, display, or stream outputs. Possible values include: + "execute_result", "display_data", "stream", "error". + :type output_type: str or ~azure.synapse.artifacts.models.CellOutputType + :param text: For output_type=stream, the stream's text output, represented as a string or an + array of strings. + :type text: object + :param data: Output data. Use MIME type as key, and content as value. + :type data: object + :param metadata: Metadata for the output item. 
+ :type metadata: object """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, + 'output_type': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + 'execution_count': {'key': 'execution_count', 'type': 'int'}, + 'output_type': {'key': 'output_type', 'type': 'str'}, + 'text': {'key': 'text', 'type': 'object'}, + 'data': {'key': 'data', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': 'object'}, } def __init__( self, **kwargs ): - super(LookupActivity, self).__init__(**kwargs) - self.type = 'Lookup' # type: str - self.source = kwargs['source'] - self.dataset = kwargs['dataset'] - self.first_row_only = kwargs.get('first_row_only', None) + super(NotebookCellOutputItem, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.execution_count = kwargs.get('execution_count', None) + self.output_type = kwargs['output_type'] + self.text = kwargs.get('text', None) + self.data = kwargs.get('data', None) + self.metadata = kwargs.get('metadata', None) -class MagentoLinkedService(LinkedService): - """Magento server linked service. +class NotebookKernelSpec(msrest.serialization.Model): + """Kernel information. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: object - :param access_token: The access token from Magento. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. 
- :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param name: Required. Name of the kernel specification. + :type name: str + :param display_name: Required. Name to display in UI. + :type display_name: str """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + 'name': {'required': True}, + 'display_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'display_name', 'type': 'str'}, } def __init__( self, **kwargs ): - super(MagentoLinkedService, self).__init__(**kwargs) - self.type = 'Magento' # type: str - self.host = kwargs['host'] - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(NotebookKernelSpec, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs['name'] + self.display_name = kwargs['display_name'] -class MagentoObjectDataset(Dataset): - """Magento server dataset. +class NotebookLanguageInfo(msrest.serialization.Model): + """Language info. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. 
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param name: Required. The programming language which this kernel runs. + :type name: str + :param codemirror_mode: The codemirror mode to use for code in this language. + :type codemirror_mode: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'codemirror_mode': {'key': 'codemirror_mode', 'type': 'str'}, } def __init__( self, **kwargs ): - super(MagentoObjectDataset, self).__init__(**kwargs) - self.type = 'MagentoObject' # type: str - self.table_name = kwargs.get('table_name', None) - - -class MappingDataFlow(DataFlow): - """Mapping data flow. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Type of data flow.Constant filled by server. - :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.DataFlowFolder - :param sources: List of sources in data flow. - :type sources: list[~azure.synapse.artifacts.models.DataFlowSource] - :param sinks: List of sinks in data flow. - :type sinks: list[~azure.synapse.artifacts.models.DataFlowSink] - :param transformations: List of transformations in data flow. - :type transformations: list[~azure.synapse.artifacts.models.Transformation] - :param script: DataFlow script. - :type script: str + super(NotebookLanguageInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs['name'] + self.codemirror_mode = kwargs.get('codemirror_mode', None) + + +class NotebookListResponse(msrest.serialization.Model): + """A list of Notebook resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of Notebooks. + :type value: list[~azure.synapse.artifacts.models.NotebookResource] + :param next_link: The link to the next page of results, if any remaining results exist. 
+ :type next_link: str """ _validation = { - 'type': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, - 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, - 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, - 'script': {'key': 'typeProperties.script', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[NotebookResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): - super(MappingDataFlow, self).__init__(**kwargs) - self.type = 'MappingDataFlow' # type: str - self.sources = kwargs.get('sources', None) - self.sinks = kwargs.get('sinks', None) - self.transformations = kwargs.get('transformations', None) - self.script = kwargs.get('script', None) - + super(NotebookListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. - All required parameters must be populated in order to send to Azure. +class NotebookMetadata(msrest.serialization.Model): + """Notebook root-level metadata. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param kernelspec: Kernel information. + :type kernelspec: ~azure.synapse.artifacts.models.NotebookKernelSpec + :param language_info: Language info. + :type language_info: ~azure.synapse.artifacts.models.NotebookLanguageInfo + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'kernelspec': {'key': 'kernelspec', 'type': 'NotebookKernelSpec'}, + 'language_info': {'key': 'language_info', 'type': 'NotebookLanguageInfo'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookMetadata, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.kernelspec = kwargs.get('kernelspec', None) + self.language_info = kwargs.get('language_info', None) + + +class NotebookResource(AzureEntityResource): + """Notebook resource type. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of Notebook. + :type properties: ~azure.synapse.artifacts.models.Notebook """ _validation = { - 'type': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Notebook'}, } def __init__( self, **kwargs ): - super(MariaDBLinkedService, self).__init__(**kwargs) - self.type = 'MariaDB' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(NotebookResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. +class NotebookSessionProperties(msrest.serialization.Model): + """Session properties. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param driver_memory: Required. Amount of memory to use for the driver process. + :type driver_memory: str + :param driver_cores: Required. Number of cores to use for the driver. + :type driver_cores: int + :param executor_memory: Required. Amount of memory to use per executor process. + :type executor_memory: str + :param executor_cores: Required. Number of cores to use for each executor. + :type executor_cores: int + :param num_executors: Required. Number of executors to launch for this session. + :type num_executors: int """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'driver_memory': {'required': True}, + 'driver_cores': {'required': True}, + 'executor_memory': {'required': True}, + 'executor_cores': {'required': True}, + 'num_executors': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, } def __init__( self, **kwargs ): - super(MariaDBTableDataset, self).__init__(**kwargs) - self.type = 'MariaDBTable' # type: str - self.table_name = kwargs.get('table_name', None) + super(NotebookSessionProperties, self).__init__(**kwargs) + self.driver_memory = kwargs['driver_memory'] + self.driver_cores = kwargs['driver_cores'] + self.executor_memory = kwargs['executor_memory'] + self.executor_cores = kwargs['executor_cores'] + self.num_executors = kwargs['num_executors'] -class MarketoLinkedService(LinkedService): - """Marketo server linked service. +class ODataLinkedService(LinkedService): + """Open Data Protocol (OData) linked service. All required parameters must be populated in order to send to Azure. @@ -11131,22 +19429,42 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. - :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the OData service. + Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", + "ManagedServiceIdentity". + :type authentication_type: str or ~azure.synapse.artifacts.models.ODataAuthenticationType + :param user_name: User name of the OData service. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password of the OData service. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Specify the tenant information (domain name or tenant ID) under which your + application resides. Type: string (or Expression with resultType string). + :type tenant: object + :param service_principal_id: Specify the application id of your application registered in Azure + Active Directory. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. + Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used + for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". + :type aad_service_principal_credential_type: str or + ~azure.synapse.artifacts.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application registered in Azure Active + Directory. Type: string (or Expression with resultType string). + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. Type: string (or Expression with resultType + string). + :type service_principal_embedded_cert: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). + :type service_principal_embedded_cert_password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -11155,8 +19473,7 @@ class MarketoLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -11166,12 +19483,17 @@ class MarketoLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -11179,19 +19501,24 @@ def __init__( self, **kwargs ): - super(MarketoLinkedService, self).__init__(**kwargs) - self.type = 'Marketo' # type: str - self.endpoint = kwargs['endpoint'] - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) + super(ODataLinkedService, self).__init__(**kwargs) + self.type = 'OData' # type: str + self.url = kwargs['url'] + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MarketoObjectDataset(Dataset): - """Marketo server dataset. 
+class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. All required parameters must be populated in order to send to Azure. @@ -11217,8 +19544,8 @@ class MarketoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param path: The OData resource path. Type: string (or Expression with resultType string). + :type path: object """ _validation = { @@ -11236,20 +19563,66 @@ class MarketoObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MarketoObjectDataset, self).__init__(**kwargs) - self.type = 'MarketoObject' # type: str - self.table_name = kwargs.get('table_name', None) + super(ODataResourceDataset, self).__init__(**kwargs) + self.type = 'ODataResource' # type: str + self.path = kwargs.get('path', None) -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ODataSource, self).__init__(**kwargs) + self.type = 'ODataSource' # type: str + self.query = kwargs.get('query', None) + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. All required parameters must be populated in order to send to Azure. @@ -11270,9 +19643,8 @@ class MicrosoftAccessLinkedService(LinkedService): as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. 
:type connection_string: object - :param authentication_type: Type of authentication used to connect to the Microsoft Access as - ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with - resultType string). + :param authentication_type: Type of authentication used to connect to the ODBC data store. + Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver- specific property-value format. @@ -11312,8 +19684,8 @@ def __init__( self, **kwargs ): - super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.type = 'MicrosoftAccess' # type: str + super(OdbcLinkedService, self).__init__(**kwargs) + self.type = 'Odbc' # type: str self.connection_string = kwargs['connection_string'] self.authentication_type = kwargs.get('authentication_type', None) self.credential = kwargs.get('credential', None) @@ -11322,220 +19694,111 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). 
- :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.type = 'MicrosoftAccessTable' # type: str - self.table_name = kwargs.get('table_name', None) - - -class MongoDbCollectionDataset(Dataset): - """The MongoDB database dataset. +class OdbcSink(CopySink): + """A copy activity ODBC sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. The table name of the MongoDB database. Type: string (or + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. 
Type: string (or Expression with resultType string). - :type collection_name: object + :type pre_copy_script: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MongoDbCollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbCollection' # type: str - self.collection_name = kwargs['collection_name'] + super(OdbcSink, self).__init__(**kwargs) + self.type = 'OdbcSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class MongoDbLinkedService(LinkedService): - """Linked service for MongoDb data source. +class OdbcSource(TabularSource): + """A copy activity source for ODBC databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. The IP address or server name of the MongoDB server. Type: string (or - Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect to the MongoDB - database. Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.synapse.artifacts.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database_name: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param auth_source: Database to verify the username and password. 
Type: string (or Expression - with resultType string). - :type auth_source: object - :param port: The TCP port number that the MongoDB server uses to listen for client connections. - The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MongoDbLinkedService, self).__init__(**kwargs) - self.type = 'MongoDb' # type: str - self.server = kwargs['server'] - self.authentication_type = kwargs.get('authentication_type', None) - self.database_name = kwargs['database_name'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.auth_source = kwargs.get('auth_source', None) - self.port = kwargs.get('port', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(OdbcSource, self).__init__(**kwargs) + self.type = 'OdbcSource' # type: str + self.query = kwargs.get('query', None) -class MongoDbV2CollectionDataset(Dataset): - """The MongoDB database dataset. +class OdbcTableDataset(Dataset): + """The ODBC table dataset. All required parameters must be populated in order to send to Azure. @@ -11561,15 +19824,13 @@ class MongoDbV2CollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB database. Type: string (or - Expression with resultType string). - :type collection: object + :param table_name: The ODBC table name. Type: string (or Expression with resultType string). 
+ :type table_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'collection': {'required': True}, } _attribute_map = { @@ -11582,172 +19843,227 @@ class MongoDbV2CollectionDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MongoDbV2CollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbV2Collection' # type: str - self.collection = kwargs['collection'] + super(OdbcTableDataset, self).__init__(**kwargs) + self.type = 'OdbcTable' # type: str + self.table_name = kwargs.get('table_name', None) -class MongoDbV2LinkedService(LinkedService): - """Linked service for MongoDB data source. +class Office365Dataset(Dataset): + """The Office365 account. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param connection_string: Required. The MongoDB connection string. Type: string, SecureString - or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or + Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the specific rows to + extract from Office 365. Type: string (or Expression with resultType string). 
+ :type predicate: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, + 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MongoDbV2LinkedService, self).__init__(**kwargs) - self.type = 'MongoDbV2' # type: str - self.connection_string = kwargs['connection_string'] - self.database = kwargs['database'] - - -class Trigger(msrest.serialization.Model): - """Azure Synapse nested object which contains information about creating pipeline run. + super(Office365Dataset, self).__init__(**kwargs) + self.type = 'Office365Table' # type: str + self.table_name = kwargs['table_name'] + self.predicate = kwargs.get('predicate', None) - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MultiplePipelineTrigger, RerunTumblingWindowTrigger. - Variables are only populated by the server, and will be ignored when sending a request. +class Office365LinkedService(LinkedService): + """Office365 linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Trigger description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. + Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. 
Specify the tenant information under which your + Azure AD web application resides. Type: string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'runtime_state': {'readonly': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - } - - _subtype_map = { - 'type': {'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger'} + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Trigger' # type: str - self.description = kwargs.get('description', None) - self.runtime_state = None - self.annotations = kwargs.get('annotations', None) - + super(Office365LinkedService, self).__init__(**kwargs) + self.type = 'Office365' # type: str + self.office365_tenant_id = kwargs['office365_tenant_id'] + self.service_principal_tenant_id = kwargs['service_principal_tenant_id'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to pipeline. - Variables are only populated by the server, and will be ignored when sending a request. +class Office365Source(CopySource): + """A copy activity source for an Office 365 service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. 
:type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param allowed_groups: The groups containing all the users. Type: array of strings (or + Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType + string). + :type user_scope_filter_uri: object + :param date_filter_column: The Column to apply the :code:`StartTime` and + :code:`EndTime`. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. Type: string (or + Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: string (or Expression + with resultType string). + :type end_time: object + :param output_columns: The columns to be read out from the Office 365 table. Type: array of + objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { + "name": "CreatedDateTime" } ]. 
+ :type output_columns: object """ _validation = { 'type': {'required': True}, - 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + 'output_columns': {'key': 'outputColumns', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.type = 'MultiplePipelineTrigger' # type: str - self.pipelines = kwargs.get('pipelines', None) + super(Office365Source, self).__init__(**kwargs) + self.type = 'Office365Source' # type: str + self.allowed_groups = kwargs.get('allowed_groups', None) + self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None) + self.date_filter_column = kwargs.get('date_filter_column', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.output_columns = kwargs.get('output_columns', None) -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. +class OracleLinkedService(LinkedService): + """Oracle database. All required parameters must be populated in order to send to Azure. @@ -11764,7 +20080,8 @@ class MySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The connection string. + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference @@ -11795,73 +20112,51 @@ def __init__( self, **kwargs ): - super(MySqlLinkedService, self).__init__(**kwargs) - self.type = 'MySql' # type: str + super(OracleLinkedService, self).__init__(**kwargs) + self.type = 'Oracle' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MySqlTableDataset(Dataset): - """The MySQL table dataset. - - All required parameters must be populated in order to send to Azure. +class OraclePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Oracle source partitioning. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The MySQL table name. Type: string (or Expression with resultType string). - :type table_name: object + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). 
+ :type partition_lower_bound: object """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MySqlTableDataset, self).__init__(**kwargs) - self.type = 'MySqlTable' # type: str - self.table_name = kwargs.get('table_name', None) + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) -class NetezzaLinkedService(LinkedService): - """Netezza linked service. +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. All required parameters must be populated in order to send to Azure. @@ -11878,11 +20173,24 @@ class NetezzaLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name that you provided in the + username key. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). 
+ :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -11891,6 +20199,9 @@ class NetezzaLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, } _attribute_map = { @@ -11900,8 +20211,12 @@ class NetezzaLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -11909,15 +20224,19 @@ def __init__( self, **kwargs ): - super(NetezzaLinkedService, self).__init__(**kwargs) - self.type = 'Netezza' # type: str - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) + super(OracleServiceCloudLinkedService, self).__init__(**kwargs) + self.type = 'OracleServiceCloud' # type: str + self.host = kwargs['host'] + self.username = kwargs['username'] + self.password = kwargs['password'] + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class NetezzaTableDataset(Dataset): - """Netezza dataset. +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. All required parameters must be populated in order to send to Azure. @@ -11943,15 +20262,8 @@ class NetezzaTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Netezza. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or - Expression with resultType string). 
- :type schema_type_properties_schema: object """ _validation = { @@ -11970,497 +20282,477 @@ class NetezzaTableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(NetezzaTableDataset, self).__init__(**kwargs) - self.type = 'NetezzaTable' # type: str + super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) + self.type = 'OracleServiceCloudObject' # type: str self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class Notebook(msrest.serialization.Model): - """Notebook. +class OracleServiceCloudSource(TabularSource): + """A copy activity Oracle Service Cloud source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param description: The description of the notebook. - :type description: str - :param big_data_pool: Big data pool reference. - :type big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference - :param session_properties: Session properties. - :type session_properties: ~azure.synapse.artifacts.models.NotebookSessionProperties - :param metadata: Required. Notebook root-level metadata. - :type metadata: ~azure.synapse.artifacts.models.NotebookMetadata - :param nbformat: Required. Notebook format (major number). Incremented between backwards - incompatible changes to the notebook format. - :type nbformat: int - :param nbformat_minor: Required. Notebook format (minor number). Incremented for backward - compatible changes to the notebook format. - :type nbformat_minor: int - :param cells: Required. Array of cells of the current notebook. - :type cells: list[~azure.synapse.artifacts.models.NotebookCell] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'metadata': {'required': True}, - 'nbformat': {'required': True}, - 'nbformat_minor': {'required': True}, - 'cells': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'big_data_pool': {'key': 'bigDataPool', 'type': 'BigDataPoolReference'}, - 'session_properties': {'key': 'sessionProperties', 'type': 'NotebookSessionProperties'}, - 'metadata': {'key': 'metadata', 'type': 'NotebookMetadata'}, - 'nbformat': {'key': 'nbformat', 'type': 'int'}, - 'nbformat_minor': {'key': 'nbformat_minor', 'type': 'int'}, - 'cells': {'key': 'cells', 'type': '[NotebookCell]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(Notebook, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.big_data_pool = kwargs.get('big_data_pool', None) - self.session_properties = kwargs.get('session_properties', None) - self.metadata = kwargs['metadata'] - self.nbformat = kwargs['nbformat'] - self.nbformat_minor = kwargs['nbformat_minor'] - self.cells = kwargs['cells'] + super(OracleServiceCloudSource, self).__init__(**kwargs) + self.type = 'OracleServiceCloudSource' # type: str + self.query = kwargs.get('query', None) -class NotebookCell(msrest.serialization.Model): - """Notebook cell. +class OracleSink(CopySink): + """A copy activity Oracle sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param cell_type: Required. String identifying the type of cell. - :type cell_type: str - :param metadata: Required. Cell-level metadata. - :type metadata: object - :param source: Required. Contents of the cell, represented as an array of lines. - :type source: list[str] - :param attachments: Attachments associated with the cell. - :type attachments: object - :param outputs: Cell-level output items. - :type outputs: list[~azure.synapse.artifacts.models.NotebookCellOutputItem] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object """ _validation = { - 'cell_type': {'required': True}, - 'metadata': {'required': True}, - 'source': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'cell_type': {'key': 'cell_type', 'type': 'str'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'source': {'key': 'source', 'type': '[str]'}, - 'attachments': {'key': 'attachments', 'type': 'object'}, - 'outputs': {'key': 'outputs', 'type': '[NotebookCellOutputItem]'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookCell, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.cell_type = kwargs['cell_type'] - self.metadata = kwargs['metadata'] - self.source = kwargs['source'] - self.attachments = kwargs.get('attachments', None) - self.outputs = kwargs.get('outputs', None) - - -class NotebookCellOutputItem(msrest.serialization.Model): - """An item of the notebook cell execution output. - - All required parameters must be populated in order to send to Azure. - - :param name: For output_type=stream, determines the name of stream (stdout / stderr). - :type name: str - :param execution_count: Execution sequence number. - :type execution_count: int - :param output_type: Required. Execution, display, or stream outputs. Possible values include: - "execute_result", "display_data", "stream", "error". - :type output_type: str or ~azure.synapse.artifacts.models.CellOutputType - :param text: For output_type=stream, the stream's text output, represented as a string or an - array of strings. - :type text: object - :param data: Output data. Use MIME type as key, and content as value. - :type data: object - :param metadata: Metadata for the output item. - :type metadata: object - """ - - _validation = { - 'output_type': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'execution_count': {'key': 'execution_count', 'type': 'int'}, - 'output_type': {'key': 'output_type', 'type': 'str'}, - 'text': {'key': 'text', 'type': 'object'}, - 'data': {'key': 'data', 'type': 'object'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(NotebookCellOutputItem, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.execution_count = kwargs.get('execution_count', None) - self.output_type = kwargs['output_type'] - self.text = kwargs.get('text', None) - self.data = kwargs.get('data', None) - self.metadata = kwargs.get('metadata', None) + super(OracleSink, self).__init__(**kwargs) + self.type = 'OracleSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class NotebookKernelSpec(msrest.serialization.Model): - """Kernel information. +class OracleSource(CopySource): + """A copy activity Oracle source. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Name of the kernel specification. - :type name: str - :param display_name: Required. Name to display in UI. - :type display_name: str + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for Oracle read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.OraclePartitionSettings """ _validation = { - 'name': {'required': True}, - 'display_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'display_name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__( self, **kwargs ): - super(NotebookKernelSpec, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs['name'] - self.display_name = kwargs['display_name'] + super(OracleSource, self).__init__(**kwargs) + self.type = 'OracleSource' # type: str + self.oracle_reader_query = kwargs.get('oracle_reader_query', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) -class NotebookLanguageInfo(msrest.serialization.Model): - """Language info. +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. 
The programming language which this kernel runs. - :type name: str - :param codemirror_mode: The codemirror mode to use for code in this language. - :type codemirror_mode: str + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the on-premises Oracle database. Type: string (or Expression + with resultType string). + :type table: object """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'codemirror_mode': {'key': 'codemirror_mode', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(NotebookLanguageInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs['name'] - self.codemirror_mode = kwargs.get('codemirror_mode', None) + super(OracleTableDataset, self).__init__(**kwargs) + self.type = 'OracleTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class NotebookListResponse(msrest.serialization.Model): - """A list of Notebook resources. +class OrcDataset(Dataset): + """ORC dataset. All required parameters must be populated in order to send to Azure. - :param value: Required. List of Notebooks. 
- :type value: list[~azure.synapse.artifacts.models.NotebookResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the ORC data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[NotebookResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, } def __init__( self, **kwargs ): - super(NotebookListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) + super(OrcDataset, self).__init__(**kwargs) + self.type = 'Orc' # type: str + self.location = kwargs.get('location', None) + self.orc_compression_codec = kwargs.get('orc_compression_codec', None) -class NotebookMetadata(msrest.serialization.Model): - """Notebook root-level metadata. +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param kernelspec: Kernel information. 
- :type kernelspec: ~azure.synapse.artifacts.models.NotebookKernelSpec - :param language_info: Language info. - :type language_info: ~azure.synapse.artifacts.models.NotebookLanguageInfo + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'kernelspec': {'key': 'kernelspec', 'type': 'NotebookKernelSpec'}, - 'language_info': {'key': 'language_info', 'type': 'NotebookLanguageInfo'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, **kwargs ): - super(NotebookMetadata, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.kernelspec = kwargs.get('kernelspec', None) - self.language_info = kwargs.get('language_info', None) - + super(OrcFormat, self).__init__(**kwargs) + self.type = 'OrcFormat' # type: str -class NotebookResource(SubResource): - """Notebook resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class OrcSink(CopySink): + """A copy activity ORC sink. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of Notebook. - :type properties: ~azure.synapse.artifacts.models.Notebook + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Notebook'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, **kwargs ): - super(NotebookResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(OrcSink, self).__init__(**kwargs) + self.type = 'OrcSink' # type: str + self.store_settings = kwargs.get('store_settings', None) -class NotebookSessionProperties(msrest.serialization.Model): - """Session properties. +class OrcSource(CopySource): + """A copy activity ORC source. All required parameters must be populated in order to send to Azure. - :param driver_memory: Required. Amount of memory to use for the driver process. - :type driver_memory: str - :param driver_cores: Required. Number of cores to use for the driver. - :type driver_cores: int - :param executor_memory: Required. Amount of memory to use per executor process. - :type executor_memory: str - :param executor_cores: Required. Number of cores to use for each executor. - :type executor_cores: int - :param num_executors: Required. Number of executors to launch for this session. - :type num_executors: int + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { - 'driver_memory': {'required': True}, - 'driver_cores': {'required': True}, - 'executor_memory': {'required': True}, - 'executor_cores': {'required': True}, - 'num_executors': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, **kwargs ): - super(NotebookSessionProperties, self).__init__(**kwargs) - self.driver_memory = kwargs['driver_memory'] - self.driver_cores = kwargs['driver_cores'] - self.executor_memory = kwargs['executor_memory'] - self.executor_cores = kwargs['executor_cores'] - self.num_executors = kwargs['num_executors'] + super(OrcSource, self).__init__(**kwargs) + self.type = 'OrcSource' # type: str + self.store_settings = kwargs.get('store_settings', None) -class ODataLinkedService(LinkedService): - """Open Data Protocol (OData) linked service. +class ParameterSpecification(msrest.serialization.Model): + """Definition of a single parameter for an entity. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the OData service. - Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", - "ManagedServiceIdentity". - :type authentication_type: str or ~azure.synapse.artifacts.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password of the OData service. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Specify the tenant information (domain name or tenant ID) under which your - application resides. Type: string (or Expression with resultType string). - :type tenant: object - :param service_principal_id: Specify the application id of your application registered in Azure - Active Directory. Type: string (or Expression with resultType string). 
- :type service_principal_id: object - :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. - Type: string (or Expression with resultType string). - :type aad_resource_id: object - :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used - for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". - :type aad_service_principal_credential_type: str or - ~azure.synapse.artifacts.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application registered in Azure Active - Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded certificate of your - application registered in Azure Active Directory. Type: string (or Expression with resultType - string). - :type service_principal_embedded_cert: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of your certificate if - your certificate has a password and you are using AadServicePrincipal authentication. Type: - string (or Expression with resultType string). - :type service_principal_embedded_cert_password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", + "Float", "Bool", "Array", "SecureString". + :type type: str or ~azure.synapse.artifacts.models.ParameterType + :param default_value: Default value of parameter. 
+ :type default_value: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ODataLinkedService, self).__init__(**kwargs) - self.type = 'OData' # type: str - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.tenant = kwargs.get('tenant', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) - self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(ParameterSpecification, self).__init__(**kwargs) + self.type = kwargs['type'] + self.default_value = kwargs.get('default_value', None) -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. +class ParquetDataset(Dataset): + """Parquet dataset. All required parameters must be populated in order to send to Azure. @@ -12486,8 +20778,10 @@ class ODataResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: The OData resource path. Type: string (or Expression with resultType string). - :type path: object + :param location: The location of the parquet storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". 
+ :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec """ _validation = { @@ -12505,151 +20799,232 @@ class ODataResourceDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, } def __init__( self, **kwargs ): - super(ODataResourceDataset, self).__init__(**kwargs) - self.type = 'ODataResource' # type: str - self.path = kwargs.get('path', None) + super(ParquetDataset, self).__init__(**kwargs) + self.type = 'Parquet' # type: str + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the ODBC data store. - Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :type deserializer: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, **kwargs ): - super(OdbcLinkedService, self).__init__(**kwargs) - self.type = 'Odbc' # type: str - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(ParquetFormat, self).__init__(**kwargs) + self.type = 'ParquetFormat' # type: str -class OdbcTableDataset(Dataset): - """The ODBC table dataset. +class ParquetSink(CopySink): + """A copy activity Parquet sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetSink, self).__init__(**kwargs) + self.type = 'ParquetSink' # type: str + self.store_settings = kwargs.get('store_settings', None) + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetSource, self).__init__(**kwargs) + self.type = 'ParquetSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + + +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The ODBC table name. Type: string (or Expression with resultType string). - :type table_name: object + :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). + :type host: object + :param client_id: Required. The client ID associated with your PayPal application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal application. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(OdbcTableDataset, self).__init__(**kwargs) - self.type = 'OdbcTable' # type: str - self.table_name = kwargs.get('table_name', None) + super(PaypalLinkedService, self).__init__(**kwargs) + self.type = 'Paypal' # type: str + self.host = kwargs['host'] + self.client_id = kwargs['client_id'] + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class Office365Dataset(Dataset): - """The Office365 account. +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. All required parameters must be populated in order to send to Azure. @@ -12675,18 +21050,13 @@ class Office365Dataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or - Expression with resultType string). + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param predicate: A predicate expression that can be used to filter the specific rows to - extract from Office 365. Type: string (or Expression with resultType string). 
- :type predicate: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -12700,148 +21070,69 @@ class Office365Dataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(Office365Dataset, self).__init__(**kwargs) - self.type = 'Office365Table' # type: str - self.table_name = kwargs['table_name'] - self.predicate = kwargs.get('predicate', None) - - -class Office365LinkedService(LinkedService): - """Office365 linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. - Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant information under which your - Azure AD web application resides. Type: string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(Office365LinkedService, self).__init__(**kwargs) - self.type = 'Office365' # type: str - self.office365_tenant_id = kwargs['office365_tenant_id'] - self.service_principal_tenant_id = kwargs['service_principal_tenant_id'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(PaypalObjectDataset, self).__init__(**kwargs) + self.type = 'PaypalObject' # type: str + self.table_name = kwargs.get('table_name', None) -class OracleLinkedService(LinkedService): - """Oracle database. +class PaypalSource(TabularSource): + """A copy activity Paypal Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(OracleLinkedService, self).__init__(**kwargs) - self.type = 'Oracle' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(PaypalSource, self).__init__(**kwargs) + self.type = 'PaypalSource' # type: str + self.query = kwargs.get('query', None) -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. All required parameters must be populated in order to send to Azure. @@ -12858,24 +21149,40 @@ class OracleServiceCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The URL of the Oracle Service Cloud instance. + :param host: Required. The IP address or host name of the Phoenix server. (i.e. + 192.168.222.160). :type host: object - :param username: Required. The user name that you use to access Oracle Service Cloud server. + :param port: The TCP port that the Phoenix server uses to listen for client connections. The + default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. (i.e. + /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using + WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to connect to the + Phoenix server. 
Possible values include: "Anonymous", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. :type username: object - :param password: Required. The password corresponding to the user name that you provided in the - username key. + :param password: The password corresponding to the user name. :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -12885,8 +21192,7 @@ class OracleServiceCloudLinkedService(LinkedService): _validation = { 'type': {'required': True}, 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -12897,11 +21203,16 @@ class OracleServiceCloudLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -12909,19 +21220,24 @@ def __init__( self, **kwargs ): - super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'OracleServiceCloud' # type: str + super(PhoenixLinkedService, self).__init__(**kwargs) + self.type = 'Phoenix' # type: str self.host = kwargs['host'] - self.username = kwargs['username'] - self.password = kwargs['password'] - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. All required parameters must be populated in order to send to Azure. @@ -12947,8 +21263,15 @@ class OracleServiceCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :param table_name: This property will be retired. Please consider using schema + table + properties instead. :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression with resultType + string). + :type table: object + :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or + Expression with resultType string). + :type schema_type_properties_schema: object """ _validation = { @@ -12967,242 +21290,424 @@ class OracleServiceCloudObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'OracleServiceCloudObject' # type: str + super(PhoenixObjectDataset, self).__init__(**kwargs) + self.type = 'PhoenixObject' # type: str self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. +class PhoenixSource(TabularSource): + """A copy activity Phoenix server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(PhoenixSource, self).__init__(**kwargs) + self.type = 'PhoenixSource' # type: str + self.query = kwargs.get('query', None) + + +class PipelineFolder(msrest.serialization.Model): + """The folder that this Pipeline is in. 
If not specified, Pipeline will appear at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class PipelineListResponse(msrest.serialization.Model): + """A list of pipeline resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipelines. + :type value: list[~azure.synapse.artifacts.models.PipelineResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class PipelineReference(msrest.serialization.Model): + """Pipeline reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Pipeline reference type. Possible values include: "PipelineReference". + :type type: str or ~azure.synapse.artifacts.models.PipelineReferenceType + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. + :type name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.name = kwargs.get('name', None) + + +class PipelineResource(AzureEntityResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
+ :param activities: List of activities in pipeline. + :type activities: list[~azure.synapse.artifacts.models.Activity] + :param parameters: List of parameters for pipeline. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param variables: List of variables for pipeline. + :type variables: dict[str, ~azure.synapse.artifacts.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the Pipeline. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the on-premises Oracle database. Type: string (or Expression - with resultType string). - :type table: object + :param run_dimensions: Dimensions emitted by Pipeline. + :type run_dimensions: dict[str, object] + :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :type folder: ~azure.synapse.artifacts.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.activities = kwargs.get('activities', None) + self.parameters = kwargs.get('parameters', None) + self.variables = kwargs.get('variables', None) + self.concurrency = kwargs.get('concurrency', None) + self.annotations = kwargs.get('annotations', None) + self.run_dimensions = kwargs.get('run_dimensions', None) + self.folder = kwargs.get('folder', None) + + +class PipelineRun(msrest.serialization.Model): + """Information about a pipeline run. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline + run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.synapse.artifacts.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. + :vartype last_updated: ~datetime.datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: ~datetime.datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: ~datetime.datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. + :vartype message: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): - super(OracleTableDataset, self).__init__(**kwargs) - self.type = 'OracleTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + 
super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None -class OrcDataset(Dataset): - """ORC dataset. +class PipelineRunInvokedBy(msrest.serialization.Model): + """Provides entity name and id that started the pipeline run. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the ORC data storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". - :type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. 
+ :vartype invoked_by_type: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, } def __init__( self, **kwargs ): - super(OrcDataset, self).__init__(**kwargs) - self.type = 'Orc' # type: str - self.location = kwargs.get('location', None) - self.orc_compression_codec = kwargs.get('orc_compression_codec', None) + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None -class ParameterSpecification(msrest.serialization.Model): - """Definition of a single parameter for an entity. +class PipelineRunsQueryResponse(msrest.serialization.Model): + """A list pipeline runs. All required parameters must be populated in order to send to Azure. - :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", - "Float", "Bool", "Array", "SecureString". - :type type: str or ~azure.synapse.artifacts.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object + :param value: Required. List of pipeline runs. + :type value: list[~azure.synapse.artifacts.models.PipelineRun] + :param continuation_token: The continuation token for getting the next page of results, if any + remaining results exist, null otherwise. + :type continuation_token: str """ _validation = { - 'type': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } def __init__( self, **kwargs ): - super(ParameterSpecification, self).__init__(**kwargs) - self.type = kwargs['type'] - self.default_value = kwargs.get('default_value', None) - + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.continuation_token = kwargs.get('continuation_token', None) -class ParquetDataset(Dataset): - """Parquet dataset. - All required parameters must be populated in order to send to Azure. +class PolybaseSettings(msrest.serialization.Model): + """PolyBase settings. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the parquet storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". - :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec + :param reject_type: Reject type. Possible values include: "value", "percentage". + :type reject_type: str or ~azure.synapse.artifacts.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that can be rejected before + the query fails. Type: number (or Expression with resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to retrieve before the + PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with + resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in delimited text files when + PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType + boolean). 
+ :type use_type_default: object """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ParquetDataset, self).__init__(**kwargs) - self.type = 'Parquet' # type: str - self.location = kwargs.get('location', None) - self.compression_codec = kwargs.get('compression_codec', None) + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reject_type = kwargs.get('reject_type', None) + self.reject_value = kwargs.get('reject_value', None) + self.reject_sample_value = kwargs.get('reject_sample_value', None) + self.use_type_default = kwargs.get('use_type_default', None) -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. @@ -13214,27 +21719,15 @@ class PaypalLinkedService(LinkedService): :param connect_via: The integration runtime reference. :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :type host: object - :param client_id: Required. The client ID associated with your PayPal application. - :type client_id: object - :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -13243,8 +21736,7 @@ class PaypalLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { @@ -13254,12 +21746,8 @@ class PaypalLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -13267,19 +21755,64 @@ def __init__( self, **kwargs ): - super(PaypalLinkedService, self).__init__(**kwargs) - self.type = 'Paypal' # type: str - self.host = kwargs['host'] - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) + super(PostgreSqlLinkedService, self).__init__(**kwargs) + self.type = 'PostgreSql' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. +class PostgreSqlSource(TabularSource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(PostgreSqlSource, self).__init__(**kwargs) + self.type = 'PostgreSqlSource' # type: str + self.query = kwargs.get('query', None) + + +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. @@ -13305,8 +21838,14 @@ class PaypalObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using schema + table + properties instead. :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression + with resultType string). + :type schema_type_properties_schema: object """ _validation = { @@ -13325,19 +21864,23 @@ class PaypalObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PaypalObjectDataset, self).__init__(**kwargs) - self.type = 'PaypalObject' # type: str + super(PostgreSqlTableDataset, self).__init__(**kwargs) + self.type = 'PostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. +class PrestoLinkedService(LinkedService): + """Presto server linked service. All required parameters must be populated in order to send to Azure. @@ -13354,21 +21897,20 @@ class PhoenixLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The IP address or host name of the Phoenix server. (i.e. + :param host: Required. 
The IP address or host name of the Presto server. (i.e. 192.168.222.160). :type host: object - :param port: The TCP port that the Phoenix server uses to listen for client connections. The - default value is 8765. + :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). + :type server_version: object + :param catalog: Required. The catalog context for all request against the server. + :type catalog: object + :param port: The TCP port that the Presto server uses to listen for client connections. The + default value is 8080. :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. (i.e. - /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using - WindowsAzureHDInsightService. - :type http_path: object :param authentication_type: Required. The authentication mechanism used to connect to the - Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. + Presto server. Possible values include: "Anonymous", "LDAP". + :type authentication_type: str or ~azure.synapse.artifacts.models.PrestoAuthenticationType + :param username: The user name used to connect to the Presto server. :type username: object :param password: The password corresponding to the user name. :type password: ~azure.synapse.artifacts.models.SecretBase @@ -13388,6 +21930,9 @@ class PhoenixLinkedService(LinkedService): :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. :type allow_self_signed_server_cert: object + :param time_zone_id: The local time zone used by the connection. Valid values for this option + are specified in the IANA Time Zone Database. The default value is the system time zone. + :type time_zone_id: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
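
Reviewer note (not part of the generated diff): a minimal construction sketch of the regenerated PrestoLinkedService surface shown in the hunk above, assuming the kwargs-based models in this patch; every connection value below is an illustrative placeholder, not a generator default.

    from azure.synapse.artifacts.models import PrestoLinkedService

    # Presto now requires server_version and catalog in addition to host and
    # authentication_type; time_zone_id is optional and maps to
    # typeProperties.timeZoneID in the serialized payload.
    presto_ls = PrestoLinkedService(
        host="192.168.222.160",
        server_version="0.148-t",
        catalog="hive",
        authentication_type="LDAP",  # or "Anonymous"
        port=8080,
        username="analyst",
        time_zone_id="UTC",
    )
    assert presto_ls.type == "Presto"  # discriminator filled by the model
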
@@ -13397,6 +21942,8 @@ class PhoenixLinkedService(LinkedService): _validation = { 'type': {'required': True}, 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, 'authentication_type': {'required': True}, } @@ -13408,8 +21955,9 @@ class PhoenixLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -13418,6 +21966,7 @@ class PhoenixLinkedService(LinkedService): 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -13425,11 +21974,12 @@ def __init__( self, **kwargs ): - super(PhoenixLinkedService, self).__init__(**kwargs) - self.type = 'Phoenix' # type: str + super(PrestoLinkedService, self).__init__(**kwargs) + self.type = 'Presto' # type: str self.host = kwargs['host'] + self.server_version = kwargs['server_version'] + self.catalog = kwargs['catalog'] self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) self.authentication_type = kwargs['authentication_type'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -13438,11 +21988,12 @@ def __init__( self.use_system_trust_store = kwargs.get('use_system_trust_store', None) self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.time_zone_id = kwargs.get('time_zone_id', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. +class PrestoObjectDataset(Dataset): + """Presto server dataset. All required parameters must be populated in order to send to Azure. @@ -13471,10 +22022,10 @@ class PhoenixObjectDataset(Dataset): :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression with resultType + :param table: The table name of the Presto. Type: string (or Expression with resultType string). :type table: object - :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or + :param schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression with resultType string). 
:type schema_type_properties_schema: object """ @@ -13503,447 +22054,230 @@ def __init__( self, **kwargs ): - super(PhoenixObjectDataset, self).__init__(**kwargs) - self.type = 'PhoenixObject' # type: str + super(PrestoObjectDataset, self).__init__(**kwargs) + self.type = 'PrestoObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class PipelineFolder(msrest.serialization.Model): - """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - - :param name: The name of the folder that this Pipeline is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class PipelineListResponse(msrest.serialization.Model): - """A list of pipeline resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of pipelines. - :type value: list[~azure.synapse.artifacts.models.PipelineResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class PipelineReference(msrest.serialization.Model): - """Pipeline reference type. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Pipeline reference type. Possible values include: "PipelineReference". - :type type: str or ~azure.synapse.artifacts.models.PipelineReferenceType - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. - :type name: str - """ - - _validation = { - 'type': {'required': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineReference, self).__init__(**kwargs) - self.type = kwargs['type'] - self.reference_name = kwargs['reference_name'] - self.name = kwargs.get('name', None) - - -class PipelineResource(SubResource): - """Pipeline resource type. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param description: The description of the pipeline. - :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.synapse.artifacts.models.Activity] - :param parameters: List of parameters for pipeline. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, ~azure.synapse.artifacts.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the Pipeline. - :type annotations: list[object] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] - :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.PipelineFolder + + +class PrestoSource(TabularSource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'concurrency': {'minimum': 1}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, - 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, - 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, - 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, - 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, - 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PipelineResource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.activities = kwargs.get('activities', None) - self.parameters = kwargs.get('parameters', None) - self.variables = kwargs.get('variables', None) - self.concurrency = kwargs.get('concurrency', None) - self.annotations = kwargs.get('annotations', None) - self.run_dimensions = kwargs.get('run_dimensions', None) - self.folder = kwargs.get('folder', None) + super(PrestoSource, self).__init__(**kwargs) + self.type = 'PrestoSource' # type: str + self.query = kwargs.get('query', None) -class PipelineRun(msrest.serialization.Model): - """Information about a pipeline run. +class PrivateEndpoint(msrest.serialization.Model): + """Private endpoint details. Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar run_id: Identifier of a run. - :vartype run_id: str - :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. - :vartype run_group_id: str - :ivar is_latest: Indicates if the recovered pipeline run is the latest in its group. - :vartype is_latest: bool - :ivar pipeline_name: The pipeline name. - :vartype pipeline_name: str - :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline - run. - :vartype parameters: dict[str, str] - :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~azure.synapse.artifacts.models.PipelineRunInvokedBy - :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. - :vartype last_updated: ~datetime.datetime - :ivar run_start: The start time of a pipeline run in ISO8601 format. - :vartype run_start: ~datetime.datetime - :ivar run_end: The end time of a pipeline run in ISO8601 format. 
- :vartype run_end: ~datetime.datetime - :ivar duration_in_ms: The duration of a pipeline run. - :vartype duration_in_ms: int - :ivar status: The status of a pipeline run. - :vartype status: str - :ivar message: The message from a pipeline run. - :vartype message: str + :ivar id: Resource id of the private endpoint. + :vartype id: str """ _validation = { - 'run_id': {'readonly': True}, - 'run_group_id': {'readonly': True}, - 'is_latest': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'parameters': {'readonly': True}, - 'invoked_by': {'readonly': True}, - 'last_updated': {'readonly': True}, - 'run_start': {'readonly': True}, - 'run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, + 'id': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, - 'is_latest': {'key': 'isLatest', 'type': 'bool'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{str}'}, - 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, - 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, - 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, - 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, } def __init__( self, **kwargs ): - super(PipelineRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.run_id = None - self.run_group_id = None - self.is_latest = None - self.pipeline_name = None - self.parameters = None - self.invoked_by = None - self.last_updated = None - self.run_start = None - self.run_end = None - self.duration_in_ms = None - self.status = None - self.message = None + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None -class PipelineRunInvokedBy(msrest.serialization.Model): - """Provides entity name and id that started the pipeline run. +class PrivateEndpointConnection(Resource): + """A private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. - :ivar name: Name of the entity that started the pipeline run. - :vartype name: str - :ivar id: The ID of the entity that started the run. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar invoked_by_type: The type of the entity that started the run. - :vartype invoked_by_type: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param private_endpoint: The private endpoint which the connection belongs to. + :type private_endpoint: ~azure.synapse.artifacts.models.PrivateEndpoint + :param private_link_service_connection_state: Connection state of the private endpoint + connection. + :type private_link_service_connection_state: + ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: Provisioning state of the private endpoint connection. 
+ :vartype provisioning_state: str """ _validation = { - 'name': {'readonly': True}, 'id': {'readonly': True}, - 'invoked_by_type': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, - 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } def __init__( self, **kwargs ): - super(PipelineRunInvokedBy, self).__init__(**kwargs) - self.name = None - self.id = None - self.invoked_by_type = None + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + self.provisioning_state = None -class PipelineRunsQueryResponse(msrest.serialization.Model): - """A list pipeline runs. +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """Connection state details of the private endpoint. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param value: Required. List of pipeline runs. - :type value: list[~azure.synapse.artifacts.models.PipelineRun] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str + :param status: The private link service connection status. Possible values include: "Approved", + "Pending", "Rejected", "Disconnected". + :type status: str or ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionStateStatus + :param description: The private link service connection description. + :type description: str + :ivar actions_required: The actions required for private link service connection. + :vartype actions_required: str """ _validation = { - 'value': {'required': True}, + 'actions_required': {'readonly': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, } def __init__( self, **kwargs ): - super(PipelineRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.continuation_token = kwargs.get('continuation_token', None) + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.actions_required = None -class PostgreSqlLinkedService(LinkedService): - """Linked service for PostgreSQL data source. +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. 
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'PostgreSql' # type: str - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - + super(ProxyResource, self).__init__(**kwargs) -class PostgreSqlTableDataset(Dataset): - """The PostgreSQL table dataset. - All required parameters must be populated in order to send to Azure. +class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): + """A list of active debug sessions. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param value: Array with all active debug sessions. + :type value: list[~azure.synapse.artifacts.models.DataFlowDebugSessionInfo] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): - super(PostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'PostgreSqlTable' # type: str - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) -class PrestoLinkedService(LinkedService): - """Presto server linked service. +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. All required parameters must be populated in order to send to Azure. 
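
Reviewer note (not part of the generated diff): a hedged sketch of the PostgreSQL dataset surface after this regeneration, where the dedicated table / schema_type_properties_schema properties supplement the retiring table_name; this assumes LinkedServiceReference keeps its required type/reference_name fields, and the linked service name is hypothetical.

    from azure.synapse.artifacts.models import LinkedServiceReference, PostgreSqlTableDataset

    # table and schema_type_properties_schema serialize to typeProperties.table
    # and typeProperties.schema respectively; table_name remains for back-compat.
    pg_dataset = PostgreSqlTableDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="MyPostgreSqlLinkedService",  # hypothetical linked service
        ),
        schema_type_properties_schema="public",
        table="orders",
    )
    assert pg_dataset.type == "PostgreSqlTable"
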
@@ -13960,42 +22294,22 @@ class PrestoLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The IP address or host name of the Presto server. (i.e. - 192.168.222.160). - :type host: object - :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :type server_version: object - :param catalog: Required. The catalog context for all request against the server. - :type catalog: object - :param port: The TCP port that the Presto server uses to listen for client connections. The - default value is 8080. - :type port: object - :param authentication_type: Required. The authentication mechanism used to connect to the - Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or ~azure.synapse.artifacts.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid values for this option - are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: object + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com). + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. + :type consumer_secret: ~azure.synapse.artifacts.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 authentication. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. + :type access_token_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. 
+ :type use_encrypted_endpoints: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -14004,10 +22318,12 @@ class PrestoLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, } _attribute_map = { @@ -14017,19 +22333,13 @@ class PrestoLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14037,26 +22347,20 @@ def __init__( self, **kwargs ): - super(PrestoLinkedService, self).__init__(**kwargs) - self.type = 'Presto' # type: str - self.host = kwargs['host'] - self.server_version = kwargs['server_version'] - self.catalog = kwargs['catalog'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.time_zone_id = 
kwargs.get('time_zone_id', None) + super(QuickBooksLinkedService, self).__init__(**kwargs) + self.type = 'QuickBooks' # type: str + self.endpoint = kwargs['endpoint'] + self.company_id = kwargs['company_id'] + self.consumer_key = kwargs['consumer_key'] + self.consumer_secret = kwargs['consumer_secret'] + self.access_token = kwargs['access_token'] + self.access_token_secret = kwargs['access_token_secret'] + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PrestoObjectDataset(Dataset): - """Presto server dataset. +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. All required parameters must be populated in order to send to Azure. @@ -14082,15 +22386,8 @@ class PrestoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Presto. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -14109,183 +22406,134 @@ class PrestoObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PrestoObjectDataset, self).__init__(**kwargs) - self.type = 'PrestoObject' # type: str + super(QuickBooksObjectDataset, self).__init__(**kwargs) + self.type = 'QuickBooksObject' # type: str self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): - """A list of active debug sessions. +class QuickBooksSource(TabularSource): + """A copy activity QuickBooks server source. - :param value: Array with all active debug sessions. - :type value: list[~azure.synapse.artifacts.models.DataFlowDebugSessionInfo] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - + super(QuickBooksSource, self).__init__(**kwargs) + self.type = 'QuickBooksSource' # type: str + self.query = kwargs.get('query', None) -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. - All required parameters must be populated in order to send to Azure. +class RecurrenceSchedule(msrest.serialization.Model): + """The recurrence schedule. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com). - :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. - :type consumer_secret: ~azure.synapse.artifacts.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 authentication. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. - :type access_token_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. + :type monthly_occurrences: list[~azure.synapse.artifacts.models.RecurrenceScheduleOccurrence] """ - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[str]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, } def __init__( self, **kwargs ): - super(QuickBooksLinkedService, self).__init__(**kwargs) - self.type = 'QuickBooks' # type: str - self.endpoint = kwargs['endpoint'] - self.company_id = kwargs['company_id'] - self.consumer_key = kwargs['consumer_key'] - self.consumer_secret = kwargs['consumer_secret'] - self.access_token = kwargs['access_token'] - self.access_token_secret = kwargs['access_token_secret'] - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + self.month_days = kwargs.get('month_days', None) + self.monthly_occurrences = kwargs.get('monthly_occurrences', None) -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. - All required parameters must be populated in order to send to Azure. +class RecurrenceScheduleOccurrence(msrest.serialization.Model): + """The recurrence schedule occurrence. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. 
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + "Wednesday", "Thursday", "Friday", "Saturday". + :type day: str or ~azure.synapse.artifacts.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'day': {'key': 'day', 'type': 'str'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, } def __init__( self, **kwargs ): - super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.type = 'QuickBooksObject' # type: str - self.table_name = kwargs.get('table_name', None) + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.day = kwargs.get('day', None) + self.occurrence = kwargs.get('occurrence', None) class RedirectIncompatibleRowSettings(msrest.serialization.Model): @@ -14326,6 +22574,84 @@ def __init__( self.path = kwargs.get('path', None) +class RedshiftUnloadSettings(msrest.serialization.Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be + used for the unload operation when copying from the Amazon Redshift source. + :type s3_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param bucket_name: Required. 
The bucket of the interim Amazon S3 which will be used to store + the unloaded data from Amazon Redshift source. The bucket must be in the same region as the + Amazon Redshift source. Type: string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = kwargs['s3_linked_service_name'] + self.bucket_name = kwargs['bucket_name'] + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(RelationalSource, self).__init__(**kwargs) + self.type = 'RelationalSource' # type: str + self.query = kwargs.get('query', None) + + class RelationalTableDataset(Dataset): """The relational table dataset. @@ -14418,20 +22744,22 @@ def __init__( self.next_link = None -class RerunTriggerResource(SubResource): +class RerunTriggerResource(AzureEntityResource): """RerunTrigger resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. 
Properties of the rerun trigger. :type properties: ~azure.synapse.artifacts.models.RerunTumblingWindowTrigger @@ -14563,54 +22891,6 @@ def __init__( self.max_concurrency = kwargs['max_concurrency'] -class Resource(msrest.serialization.Model): - """Azure Synapse top-level resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.e_tag = None - - class ResponsysLinkedService(LinkedService): """Responsys linked service. @@ -14749,6 +23029,56 @@ def __init__( self.table_name = kwargs.get('table_name', None) +class ResponsysSource(TabularSource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ResponsysSource, self).__init__(**kwargs) + self.type = 'ResponsysSource' # type: str + self.query = kwargs.get('query', None) + + class RestResourceDataset(Dataset): """A Rest service dataset. @@ -14920,6 +23250,106 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page request. 
+ :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(RestSource, self).__init__(**kwargs) + self.type = 'RestSource' # type: str + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + + +class RetryPolicy(msrest.serialization.Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(RetryPolicy, self).__init__(**kwargs) + self.count = kwargs.get('count', None) + self.interval_in_seconds = kwargs.get('interval_in_seconds', None) + + class RunFilterParameters(msrest.serialization.Model): """Query parameters for listing runs. @@ -15241,6 +23671,56 @@ def __init__( self.table_name = kwargs.get('table_name', None) +class SalesforceMarketingCloudSource(TabularSource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SalesforceMarketingCloudSource, self).__init__(**kwargs) + self.type = 'SalesforceMarketingCloudSource' # type: str + self.query = kwargs.get('query', None) + + class SalesforceObjectDataset(Dataset): """The Salesforce object dataset. @@ -15275,68 +23755,318 @@ class SalesforceObjectDataset(Dataset): _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SalesforceObjectDataset, self).__init__(**kwargs) + self.type = 'SalesforceObject' # type: str + self.object_api_name = kwargs.get('object_api_name', None) + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param environment_url: The URL of Salesforce Service Cloud instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce instance. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param security_token: The security token is required to remotely access Salesforce instance. + :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param extended_properties: Extended properties appended to the connection string. Type: string + (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloud' # type: str + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or + Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloudObject' # type: str + self.object_api_name = kwargs.get('object_api_name', None) + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + include: "Insert", "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
+ :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SalesforceServiceCloudSink, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloudSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". 
+ :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SalesforceObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceObject' # type: str - self.object_api_name = kwargs.get('object_api_name', None) + super(SalesforceServiceCloudSource, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloudSource' # type: str + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param environment_url: The URL of Salesforce Service Cloud instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. - :type security_token: ~azure.synapse.artifacts.models.SecretBase - :param extended_properties: Extended properties appended to the connection string. Type: string - (or Expression with resultType string). 
- :type extended_properties: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + include: "Insert", "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
+ :type ignore_null_values: object """ _validation = { @@ -15346,89 +24076,79 @@ class SalesforceServiceCloudLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloud' # type: str - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SalesforceSink, self).__init__(**kwargs) + self.type = 'SalesforceSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. +class SalesforceSource(TabularSource): + """A copy activity Salesforce source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or - Expression with resultType string). - :type object_api_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudObject' # type: str - self.object_api_name = kwargs.get('object_api_name', None) + super(SalesforceSource, self).__init__(**kwargs) + self.type = 'SalesforceSource' # type: str + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) class SapBwCubeDataset(Dataset): @@ -15559,6 +24279,55 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class SapBwSource(TabularSource): + """A copy activity source for SapBW server via MDX. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: MDX query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapBwSource, self).__init__(**kwargs) + self.type = 'SapBwSource' # type: str + self.query = kwargs.get('query', None) + + class SapCloudForCustomerLinkedService(LinkedService): """Linked service for SAP Cloud for Customer. @@ -15677,9 +24446,114 @@ def __init__( self, **kwargs ): - super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerResource' # type: str - self.path = kwargs['path'] + super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) + self.type = 'SapCloudForCustomerResource' # type: str + self.path = kwargs['path'] + + +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + values include: "Insert", "Update". + :type write_behavior: str or + ~azure.synapse.artifacts.models.SapCloudForCustomerSinkWriteBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SapCloudForCustomerSink, self).__init__(**kwargs) + self.type = 'SapCloudForCustomerSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + + +class SapCloudForCustomerSource(TabularSource): + """A copy activity source for SAP Cloud for Customer source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapCloudForCustomerSource, self).__init__(**kwargs) + self.type = 'SapCloudForCustomerSource' # type: str + self.query = kwargs.get('query', None) class SapEccLinkedService(LinkedService): @@ -15805,6 +24679,56 @@ def __init__( self.path = kwargs['path'] +class SapEccSource(TabularSource): + """A copy activity source for SAP ECC source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapEccSource, self).__init__(**kwargs) + self.type = 'SapEccSource' # type: str + self.query = kwargs.get('query', None) + + class SapHanaLinkedService(LinkedService): """SAP HANA Linked Service. @@ -15877,6 +24801,90 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class SapHanaPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP HANA source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapHanaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + + +class SapHanaSource(TabularSource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression + with resultType integer). + :type packet_size: object + :param partition_option: The partition mechanism that will be used for SAP HANA read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.SapHanaPartitionOption + :param partition_settings: The settings that will be leveraged for SAP HANA source + partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SapHanaPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(SapHanaSource, self).__init__(**kwargs) + self.type = 'SapHanaSource' # type: str + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + + class SapHanaTableDataset(Dataset): """SAP HANA Table properties. @@ -16022,6 +25030,62 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class SapOpenHubSource(TabularSource): + """A copy activity source for SAP Business Warehouse Open Hub Destination source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param exclude_last_request: Whether to exclude the records of the last request. The default + value is true. 
Type: boolean (or Expression with resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapOpenHubSource, self).__init__(**kwargs) + self.type = 'SapOpenHubSource' # type: str + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + + class SapOpenHubTableDataset(Dataset): """Sap Business Warehouse Open Hub Destination Table properties. @@ -16216,6 +25280,43 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class SapTablePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table will be split into. + Type: integer (or Expression with resultType string). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + self.max_partitions_number = kwargs.get('max_partitions_number', None) + + class SapTableResourceDataset(Dataset): """SAP Table Resource properties. 
@@ -16271,9 +25372,190 @@ def __init__( self, **kwargs ): - super(SapTableResourceDataset, self).__init__(**kwargs) - self.type = 'SapTableResource' # type: str - self.table_name = kwargs['table_name'] + super(SapTableResourceDataset, self).__init__(**kwargs) + self.type = 'SapTableResource' # type: str + self.table_name = kwargs['table_name'] + + +class SapTableSource(TabularSource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param row_count: The number of rows to be retrieved. Type: integer(or Expression with + resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with + resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, + column0, column1. Type: string (or Expression with resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 + EQ SOME VALUE. Type: string (or Expression with resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when + retrieving data from SAP Table. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP table read in + parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", + "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". + :type partition_option: str or ~azure.synapse.artifacts.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP table source + partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(SapTableSource, self).__init__(**kwargs) + self.type = 'SapTableSource' # type: str + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. 
+ :type recurrence: ~azure.synapse.artifacts.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__( + self, + **kwargs + ): + super(ScheduleTrigger, self).__init__(**kwargs) + self.type = 'ScheduleTrigger' # type: str + self.recurrence = kwargs['recurrence'] + + +class ScheduleTriggerRecurrence(msrest.serialization.Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :type frequency: str or ~azure.synapse.artifacts.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: ~datetime.datetime + :param end_time: The end time. + :type end_time: ~datetime.datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. + :type schedule: ~azure.synapse.artifacts.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__( + self, + **kwargs + ): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) class ScriptAction(msrest.serialization.Model): @@ -16346,6 +25628,80 @@ def __init__( self.value = kwargs['value'] +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str + self.offset = kwargs['offset'] + self.size = kwargs.get('size', None) + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :param linked_info: The base definition of a linked integration runtime. + :type linked_info: ~azure.synapse.artifacts.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__( + self, + **kwargs + ): + super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) + self.type = 'SelfHosted' # type: str + self.linked_info = kwargs.get('linked_info', None) + + class ServiceNowLinkedService(LinkedService): """ServiceNow server linked service. @@ -16497,6 +25853,56 @@ def __init__( self.table_name = kwargs.get('table_name', None) +class ServiceNowSource(TabularSource): + """A copy activity ServiceNow server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ServiceNowSource, self).__init__(**kwargs) + self.type = 'ServiceNowSource' # type: str + self.query = kwargs.get('query', None) + + class SetVariableActivity(Activity): """Set value for a Variable. @@ -16547,6 +25953,101 @@ def __init__( self.value = kwargs.get('value', None) +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SftpLocation, self).__init__(**kwargs) + self.type = 'SftpLocation' # type: str + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SftpReadSettings, self).__init__(**kwargs) + self.type = 'SftpReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + class SftpServerLinkedService(LinkedService): """A linked service for an SSH File Transfer Protocol (SFTP) server. @@ -16648,6 +26149,47 @@ def __init__( self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) +class SftpWriteSettings(StoreWriteSettings): + """Sftp write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default + value: 01:00:00 (one hour). Type: string (or Expression with resultType string). + :type operation_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SftpWriteSettings, self).__init__(**kwargs) + self.type = 'SftpWriteSettings' # type: str + self.operation_timeout = kwargs.get('operation_timeout', None) + + class ShopifyLinkedService(LinkedService): """Shopify Service linked service. 
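
A minimal usage sketch, not taken from this patch: it assumes the models shown above are re-exported from azure.synapse.artifacts.models (the usual layout for autorest-generated packages) and uses made-up sample values for illustration only.

from azure.synapse.artifacts.models import SftpReadSettings, SftpWriteSettings

# The generated __init__(self, **kwargs) reads each known field with kwargs.get(...),
# so every keyword argument below is optional for these SFTP settings types; the
# 'type' discriminator is filled in by the subclass constructor itself.
read_settings = SftpReadSettings(
    recursive=True,                        # walk the folder path recursively
    wildcard_file_name="*.csv",            # hypothetical wildcard pattern
    modified_datetime_start="2020-09-01T00:00:00Z",
)
write_settings = SftpWriteSettings(
    operation_timeout="01:00:00",          # per-chunk write timeout (the docstring's default)
)

# msrest models expose serialize(), which maps attributes back to the wire names
# declared in _attribute_map (e.g. wildcard_file_name -> wildcardFileName).
print(read_settings.serialize())
print(write_settings.serialize())
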
@@ -16752,31 +26294,109 @@ class ShopifyObjectDataset(Dataset): :type table_name: object """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ShopifyObjectDataset, self).__init__(**kwargs) + self.type = 'ShopifyObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class ShopifySource(TabularSource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ShopifySource, self).__init__(**kwargs) + self.type = 'ShopifySource' # type: str + self.query = kwargs.get('query', None) + + +class Sku(msrest.serialization.Model): + """SQL pool SKU. + + :param tier: The service tier. + :type tier: str + :param name: The SKU name. + :type name: str + :param capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. 
+ :type capacity: int + """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, } def __init__( self, **kwargs ): - super(ShopifyObjectDataset, self).__init__(**kwargs) - self.type = 'ShopifyObject' # type: str - self.table_name = kwargs.get('table_name', None) + super(Sku, self).__init__(**kwargs) + self.tier = kwargs.get('tier', None) + self.name = kwargs.get('name', None) + self.capacity = kwargs.get('capacity', None) class SparkBatchJob(msrest.serialization.Model): @@ -16971,20 +26591,22 @@ def __init__( self.job_properties = kwargs['job_properties'] -class SparkJobDefinitionResource(SubResource): +class SparkJobDefinitionResource(AzureEntityResource): """Spark job definition resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of spark job definition. 
:type properties: ~azure.synapse.artifacts.models.SparkJobDefinition @@ -17343,179 +26965,619 @@ class SparkRequest(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, - 'arguments': {'key': 'args', 'type': '[str]'}, - 'jars': {'key': 'jars', 'type': '[str]'}, - 'python_files': {'key': 'pyFiles', 'type': '[str]'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'configuration': {'key': 'conf', 'type': '{str}'}, - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'executor_count': {'key': 'numExecutors', 'type': 'int'}, + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'arguments': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'python_files': {'key': 'pyFiles', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'configuration': {'key': 'conf', 'type': '{str}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'executor_count': {'key': 'numExecutors', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkRequest, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.file = kwargs.get('file', None) + self.class_name = kwargs.get('class_name', None) + self.arguments = kwargs.get('arguments', None) + self.jars = kwargs.get('jars', None) + self.python_files = kwargs.get('python_files', None) + self.files = kwargs.get('files', None) + self.archives = kwargs.get('archives', None) + self.configuration = kwargs.get('configuration', None) + self.driver_memory = kwargs.get('driver_memory', None) + self.driver_cores = kwargs.get('driver_cores', None) + self.executor_memory = kwargs.get('executor_memory', None) + self.executor_cores = kwargs.get('executor_cores', None) + self.executor_count = kwargs.get('executor_count', None) + + +class SparkScheduler(msrest.serialization.Model): + """SparkScheduler. + + :param submitted_at: + :type submitted_at: ~datetime.datetime + :param scheduled_at: + :type scheduled_at: ~datetime.datetime + :param ended_at: + :type ended_at: ~datetime.datetime + :param cancellation_requested_at: + :type cancellation_requested_at: ~datetime.datetime + :param current_state: Possible values include: "Queued", "Scheduled", "Ended". 
+ :type current_state: str or ~azure.synapse.artifacts.models.SchedulerCurrentState + """ + + _attribute_map = { + 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, + 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, + 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, + 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkScheduler, self).__init__(**kwargs) + self.submitted_at = kwargs.get('submitted_at', None) + self.scheduled_at = kwargs.get('scheduled_at', None) + self.ended_at = kwargs.get('ended_at', None) + self.cancellation_requested_at = kwargs.get('cancellation_requested_at', None) + self.current_state = kwargs.get('current_state', None) + + +class SparkServiceError(msrest.serialization.Model): + """SparkServiceError. + + :param message: + :type message: str + :param error_code: + :type error_code: str + :param source: Possible values include: "System", "User", "Unknown", "Dependency". + :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource + """ + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'source': {'key': 'source', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkServiceError, self).__init__(**kwargs) + self.message = kwargs.get('message', None) + self.error_code = kwargs.get('error_code', None) + self.source = kwargs.get('source', None) + + +class SparkServicePlugin(msrest.serialization.Model): + """SparkServicePlugin. + + :param preparation_started_at: + :type preparation_started_at: ~datetime.datetime + :param resource_acquisition_started_at: + :type resource_acquisition_started_at: ~datetime.datetime + :param submission_started_at: + :type submission_started_at: ~datetime.datetime + :param monitoring_started_at: + :type monitoring_started_at: ~datetime.datetime + :param cleanup_started_at: + :type cleanup_started_at: ~datetime.datetime + :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", + "Submission", "Monitoring", "Cleanup", "Ended". + :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState + """ + + _attribute_map = { + 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, + 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, + 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, + 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, + 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkServicePlugin, self).__init__(**kwargs) + self.preparation_started_at = kwargs.get('preparation_started_at', None) + self.resource_acquisition_started_at = kwargs.get('resource_acquisition_started_at', None) + self.submission_started_at = kwargs.get('submission_started_at', None) + self.monitoring_started_at = kwargs.get('monitoring_started_at', None) + self.cleanup_started_at = kwargs.get('cleanup_started_at', None) + self.current_state = kwargs.get('current_state', None) + + +class SparkSource(TabularSource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkSource, self).__init__(**kwargs) + self.type = 'SparkSource' # type: str + self.query = kwargs.get('query', None) + + +class SqlConnection(msrest.serialization.Model): + """The connection used to execute the SQL script. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", + "SqlPool". + :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType + :param name: Required. The identifier of the connection. + :type name: str + """ + + _validation = { + 'type': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlConnection, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs['type'] + self.name = kwargs['name'] + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + applicable. Type: boolean (or Expression with resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :type poly_base_settings: ~azure.synapse.artifacts.models.PolybaseSettings + :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. + Type: boolean (or Expression with resultType boolean). + :type allow_copy_command: object + :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is + true. + :type copy_command_settings: ~azure.synapse.artifacts.models.DWCopyCommandSettings + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkRequest, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.file = kwargs.get('file', None) - self.class_name = kwargs.get('class_name', None) - self.arguments = kwargs.get('arguments', None) - self.jars = kwargs.get('jars', None) - self.python_files = kwargs.get('python_files', None) - self.files = kwargs.get('files', None) - self.archives = kwargs.get('archives', None) - self.configuration = kwargs.get('configuration', None) - self.driver_memory = kwargs.get('driver_memory', None) - self.driver_cores = kwargs.get('driver_cores', None) - self.executor_memory = kwargs.get('executor_memory', None) - self.executor_cores = kwargs.get('executor_cores', None) - self.executor_count = kwargs.get('executor_count', None) + super(SqlDWSink, 
self).__init__(**kwargs) + self.type = 'SqlDWSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.allow_poly_base = kwargs.get('allow_poly_base', None) + self.poly_base_settings = kwargs.get('poly_base_settings', None) + self.allow_copy_command = kwargs.get('allow_copy_command', None) + self.copy_command_settings = kwargs.get('copy_command_settings', None) + self.table_option = kwargs.get('table_option', None) -class SparkScheduler(msrest.serialization.Model): - """SparkScheduler. +class SqlDWSource(TabularSource): + """A copy activity SQL Data Warehouse source. - :param submitted_at: - :type submitted_at: ~datetime.datetime - :param scheduled_at: - :type scheduled_at: ~datetime.datetime - :param ended_at: - :type ended_at: ~datetime.datetime - :param cancellation_requested_at: - :type cancellation_requested_at: ~datetime.datetime - :param current_state: Possible values include: "Queued", "Scheduled", "Ended". - :type current_state: str or ~azure.synapse.artifacts.models.SchedulerCurrentState + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with + resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType + object), itemType: StoredProcedureParameter. 
+ :type stored_procedure_parameters: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, - 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, - 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, - 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkScheduler, self).__init__(**kwargs) - self.submitted_at = kwargs.get('submitted_at', None) - self.scheduled_at = kwargs.get('scheduled_at', None) - self.ended_at = kwargs.get('ended_at', None) - self.cancellation_requested_at = kwargs.get('cancellation_requested_at', None) - self.current_state = kwargs.get('current_state', None) + super(SqlDWSource, self).__init__(**kwargs) + self.type = 'SqlDWSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) -class SparkServiceError(msrest.serialization.Model): - """SparkServiceError. +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. - :param message: - :type message: str - :param error_code: - :type error_code: str - :param source: Possible values include: "System", "User", "Unknown", "Dependency". - :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). 
+ :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'message': {'key': 'message', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkServiceError, self).__init__(**kwargs) - self.message = kwargs.get('message', None) - self.error_code = kwargs.get('error_code', None) - self.source = kwargs.get('source', None) + super(SqlMISink, self).__init__(**kwargs) + self.type = 'SqlMISink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) -class SparkServicePlugin(msrest.serialization.Model): - """SparkServicePlugin. +class SqlMISource(TabularSource): + """A copy activity Azure SQL Managed Instance source. 
- :param preparation_started_at: - :type preparation_started_at: ~datetime.datetime - :param resource_acquisition_started_at: - :type resource_acquisition_started_at: ~datetime.datetime - :param submission_started_at: - :type submission_started_at: ~datetime.datetime - :param monitoring_started_at: - :type monitoring_started_at: ~datetime.datetime - :param cleanup_started_at: - :type cleanup_started_at: ~datetime.datetime - :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", - "Submission", "Monitoring", "Cleanup", "Ended". - :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed + Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or + Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, - 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, - 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, - 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, - 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkServicePlugin, self).__init__(**kwargs) - self.preparation_started_at = kwargs.get('preparation_started_at', None) - self.resource_acquisition_started_at = kwargs.get('resource_acquisition_started_at', None) - self.submission_started_at = kwargs.get('submission_started_at', None) - self.monitoring_started_at = kwargs.get('monitoring_started_at', None) - self.cleanup_started_at = kwargs.get('cleanup_started_at', None) - self.current_state = kwargs.get('current_state', None) + super(SqlMISource, self).__init__(**kwargs) + self.type = 'SqlMISource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) -class SqlConnection(msrest.serialization.Model): - """The connection used to execute the SQL script. +class SqlPool(TrackedResource): + """A SQL Analytics pool. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", - "SqlPool". - :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType - :param name: Required. The identifier of the connection. - :type name: str + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. 
The geo-location where the resource lives. + :type location: str + :param sku: SQL pool SKU. + :type sku: ~azure.synapse.artifacts.models.Sku + :param max_size_bytes: Maximum size in bytes. + :type max_size_bytes: long + :param collation: Collation mode. + :type collation: str + :param source_database_id: Source database to create from. + :type source_database_id: str + :param recoverable_database_id: Backup database to restore from. + :type recoverable_database_id: str + :param provisioning_state: Resource state. + :type provisioning_state: str + :param status: Resource status. + :type status: str + :param restore_point_in_time: Snapshot time to restore. + :type restore_point_in_time: ~datetime.datetime + :param create_mode: What is this?. + :type create_mode: str + :param creation_date: Date the SQL pool was created. + :type creation_date: ~datetime.datetime """ - _validation = { - 'type': {'required': True}, - 'name': {'required': True}, - } + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'max_size_bytes': {'key': 'properties.maxSizeBytes', 'type': 'long'}, + 'collation': {'key': 'properties.collation', 'type': 'str'}, + 'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'}, + 'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'}, + 'create_mode': {'key': 'properties.createMode', 'type': 'str'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlPool, self).__init__(**kwargs) + self.sku = kwargs.get('sku', None) + self.max_size_bytes = kwargs.get('max_size_bytes', None) + self.collation = kwargs.get('collation', None) + self.source_database_id = kwargs.get('source_database_id', None) + self.recoverable_database_id = kwargs.get('recoverable_database_id', None) + self.provisioning_state = kwargs.get('provisioning_state', None) + self.status = kwargs.get('status', None) + self.restore_point_in_time = kwargs.get('restore_point_in_time', None) + self.create_mode = kwargs.get('create_mode', None) + self.creation_date = kwargs.get('creation_date', None) + + +class SqlPoolInfoListResult(msrest.serialization.Model): + """List of SQL pools. + + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of SQL pools. 
+ :type value: list[~azure.synapse.artifacts.models.SqlPool] + """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[SqlPool]'}, } def __init__( self, **kwargs ): - super(SqlConnection, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs['type'] - self.name = kwargs['name'] + super(SqlPoolInfoListResult, self).__init__(**kwargs) + self.next_link = kwargs.get('next_link', None) + self.value = kwargs.get('value', None) class SqlPoolReference(msrest.serialization.Model): @@ -17708,20 +27770,22 @@ def __init__( self.language = kwargs.get('language', None) -class SqlScriptResource(SubResource): +class SqlScriptResource(AzureEntityResource): """Sql Script resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of sql script. :type properties: ~azure.synapse.artifacts.models.SqlScript @@ -17842,6 +27906,150 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). 
+ :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlServerSink, self).__init__(**kwargs) + self.type = 'SqlServerSink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + + +class SqlServerSource(TabularSource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlServerSource, self).__init__(**kwargs) + self.type = 'SqlServerSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + + class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. @@ -17971,6 +28179,146 @@ def __init__( self.table = kwargs.get('table', None) +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlSink, self).__init__(**kwargs) + self.type = 'SqlSink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + + +class SqlSource(TabularSource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlSource, self).__init__(**kwargs) + self.type = 'SqlSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + + class SquareLinkedService(LinkedService): """Square Service linked service. @@ -18087,29 +28435,79 @@ class SquareObjectDataset(Dataset): _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SquareObjectDataset, self).__init__(**kwargs) + self.type = 'SquareObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class SquareSource(TabularSource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. 
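
The SQL copy source and sink models above are plain msrest models that a copy activity carries in its type properties. A minimal sketch of how they might be constructed through the kwargs-based constructors in this file, assuming SqlServerSource, SqlServerSink and StoredProcedureParameter are exported from azure.synapse.artifacts.models as the models/__init__.py changes in this patch suggest; the procedure name, parameter and timeout values are illustrative only:

from azure.synapse.artifacts.models import (
    SqlServerSink,
    SqlServerSource,
    StoredProcedureParameter,
)

# Source that reads through a stored procedure, mirroring the
# stored_procedure_parameters example quoted in the docstrings above.
source = SqlServerSource(
    sql_reader_stored_procedure_name='usp_GetOrders',  # hypothetical procedure name
    stored_procedure_parameters={
        'Parameter1': StoredProcedureParameter(value='1', type='int'),
    },
    query_timeout='02:00:00',  # timespan string matching the documented pattern
)

# Sink that lets the service auto-create the target table.
sink = SqlServerSink(
    write_batch_size=10000,
    table_option='autoCreate',  # the only supported value per the docstring
)

assert source.type == 'SqlServerSource' and sink.type == 'SqlServerSink'
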
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SquareObjectDataset, self).__init__(**kwargs) - self.type = 'SquareObject' # type: str - self.table_name = kwargs.get('table_name', None) + super(SquareSource, self).__init__(**kwargs) + self.type = 'SquareSource' # type: str + self.query = kwargs.get('query', None) class SSISAccessCredential(msrest.serialization.Model): @@ -18533,6 +28931,44 @@ def __init__( self.type = kwargs.get('type', None) +class SubResource(AzureEntityResource): + """Azure Synapse nested resource, which belongs to a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. 
+ :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + + class SwitchActivity(Activity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. @@ -18692,6 +29128,55 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class SybaseSource(TabularSource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SybaseSource, self).__init__(**kwargs) + self.type = 'SybaseSource' # type: str + self.query = kwargs.get('query', None) + + class SybaseTableDataset(Dataset): """The Sybase table dataset. @@ -18910,6 +29395,66 @@ def __init__( self.reference_name = kwargs['reference_name'] +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy translator type.Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + MyName" Type: string (or Expression with resultType string). This property will be retired. + Please use mappings property. 
+ :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. + Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": + "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will + be retired. Please use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. + Type: object (or Expression with resultType object). + :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and object) values to simple + strings in json format. Type: boolean (or Expression with resultType boolean). + :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. Tabular->tabular example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TabularTranslator, self).__init__(**kwargs) + self.type = 'TabularTranslator' # type: str + self.column_mappings = kwargs.get('column_mappings', None) + self.schema_mapping = kwargs.get('schema_mapping', None) + self.collection_reference = kwargs.get('collection_reference', None) + self.map_complex_values_to_string = kwargs.get('map_complex_values_to_string', None) + self.mappings = kwargs.get('mappings', None) + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -18980,6 +29525,97 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class TeradataPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range or + hash partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). 
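
Because mappings is typed as a bare object, the tabular-to-tabular example quoted in the TabularTranslator docstring above can be passed straight through as Python dicts. A minimal sketch, assuming TabularTranslator is exported from azure.synapse.artifacts.models:

from azure.synapse.artifacts.models import TabularTranslator

# Column mappings taken from the docstring example above.
translator = TabularTranslator(
    mappings=[
        {'source': {'name': 'CustomerName', 'type': 'String'},
         'sink': {'name': 'ClientName', 'type': 'String'}},
        {'source': {'name': 'CustomerAddress', 'type': 'String'},
         'sink': {'name': 'ClientAddress', 'type': 'String'}},
    ],
)
assert translator.type == 'TabularTranslator'
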
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + + +class TeradataSource(TabularSource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Teradata query. Type: string (or Expression with resultType string). + :type query: object + :param partition_option: The partition mechanism that will be used for teradata read in + parallel. Possible values include: "None", "Hash", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for teradata source + partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(TeradataSource, self).__init__(**kwargs) + self.type = 'TeradataSource' # type: str + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + + class TeradataTableDataset(Dataset): """The Teradata database dataset. @@ -19043,6 +29679,86 @@ def __init__( self.table = kwargs.get('table', None) +class TextFormat(DatasetStorageFormat): + """The data stored in text format. 
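
The Teradata source above pairs the partition_option enum with the TeradataPartitionSettings model for parallel reads. A hedged sketch of a dynamic-range read, assuming both classes are exported from azure.synapse.artifacts.models; the query, column name and bounds are illustrative:

from azure.synapse.artifacts.models import TeradataPartitionSettings, TeradataSource

# Range-partitioned parallel read; OrderId and the bounds are placeholders.
source = TeradataSource(
    query='SELECT * FROM Orders',
    partition_option='DynamicRange',
    partition_settings=TeradataPartitionSettings(
        partition_column_name='OrderId',
        partition_lower_bound='1',
        partition_upper_bound='100000',
    ),
)
assert source.type == 'TeradataSource'
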
+ 
+ All required parameters must be populated in order to send to Azure. 
+ 
+ :param additional_properties: Unmatched properties from the message are deserialized to this 
+ collection. 
+ :type additional_properties: dict[str, object] 
+ :param type: Required. Type of dataset storage format.Constant filled by server. 
+ :type type: str 
+ :param serializer: Serializer. Type: string (or Expression with resultType string). 
+ :type serializer: object 
+ :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :type deserializer: object 
+ :param column_delimiter: The column delimiter. Type: string (or Expression with resultType 
+ string). 
+ :type column_delimiter: object 
+ :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). 
+ :type row_delimiter: object 
+ :param escape_char: The escape character. Type: string (or Expression with resultType string). 
+ :type escape_char: object 
+ :param quote_char: The quote character. Type: string (or Expression with resultType string). 
+ :type quote_char: object 
+ :param null_value: The null value string. Type: string (or Expression with resultType string). 
+ :type null_value: object 
+ :param encoding_name: The code page name of the preferred encoding. If missing, the default 
+ value is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of 
+ the table in the following link to set supported values: 
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with 
+ resultType string). 
+ :type encoding_name: object 
+ :param treat_empty_as_null: Treat empty column values in the text file as null. The default 
+ value is true. Type: boolean (or Expression with resultType boolean). 
+ :type treat_empty_as_null: object 
+ :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The 
+ default value is 0. Type: integer (or Expression with resultType integer). 
+ :type skip_line_count: object 
+ :param first_row_as_header: When used as input, treat the first row of data as headers. When 
+ used as output, write the headers into the output as the first row of data. The default value is 
+ false. Type: boolean (or Expression with resultType boolean). 
+ :type first_row_as_header: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, + 'escape_char': {'key': 'escapeChar', 'type': 'object'}, + 'quote_char': {'key': 'quoteChar', 'type': 'object'}, + 'null_value': {'key': 'nullValue', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TextFormat, self).__init__(**kwargs) + self.type = 'TextFormat' # type: str + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.escape_char = kwargs.get('escape_char', None) + self.quote_char = kwargs.get('quote_char', None) + self.null_value = kwargs.get('null_value', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_line_count = kwargs.get('skip_line_count', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + + class TriggerDependencyProvisioningStatus(msrest.serialization.Model): """Defines the response of a provision trigger dependency operation. @@ -19073,6 +29789,43 @@ def __init__( self.provisioning_status = kwargs['provisioning_status'] +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__( + self, + **kwargs + ): + super(TriggerDependencyReference, self).__init__(**kwargs) + self.type = 'TriggerDependencyReference' # type: str + self.reference_trigger = kwargs['reference_trigger'] + + class TriggerListResponse(msrest.serialization.Model): """A list of trigger resources. @@ -19125,20 +29878,52 @@ def __init__( self.parameters = kwargs.get('parameters', None) -class TriggerResource(SubResource): +class TriggerReference(msrest.serialization.Model): + """Trigger reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Trigger reference type. Possible values include: "TriggerReference". + :type type: str or ~azure.synapse.artifacts.models.TriggerReferenceType + :param reference_name: Required. Reference trigger name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TriggerReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + + +class TriggerResource(AzureEntityResource): """Trigger resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of the trigger. :type properties: ~azure.synapse.artifacts.models.Trigger @@ -19295,6 +30080,139 @@ def __init__( self.status = None +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour". + :type frequency: str or ~azure.synapse.artifacts.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: The end time for the time period for the trigger during which events are fired + for windows that are ready. Only UTC time is currently supported. 
+ :type end_time: ~datetime.datetime + :param delay: Specifies how long the trigger waits past due time before triggering new run. It + doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline runs. + :type retry_policy: ~azure.synapse.artifacts.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + supported. + :type depends_on: list[~azure.synapse.artifacts.models.DependencyReference] + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__( + self, + **kwargs + ): + super(TumblingWindowTrigger, self).__init__(**kwargs) + self.type = 'TumblingWindowTrigger' # type: str + self.pipeline = kwargs['pipeline'] + self.frequency = kwargs['frequency'] + self.interval = kwargs['interval'] + self.start_time = kwargs['start_time'] + self.end_time = kwargs.get('end_time', None) + self.delay = kwargs.get('delay', None) + self.max_concurrency = kwargs['max_concurrency'] + self.retry_policy = kwargs.get('retry_policy', None) + self.depends_on = kwargs.get('depends_on', None) + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) + self.type = 'TumblingWindowTriggerDependencyReference' # type: str + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) + + class UntilActivity(Activity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. @@ -19543,6 +30461,56 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class VerticaSource(TabularSource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(VerticaSource, self).__init__(**kwargs) + self.type = 'VerticaSource' # type: str + self.query = kwargs.get('query', None) + + class VerticaTableDataset(Dataset): """Vertica dataset. @@ -19612,6 +30580,25 @@ def __init__( self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) +class VirtualNetworkProfile(msrest.serialization.Model): + """Virtual Network Profile. + + :param compute_subnet_id: Subnet ID used for computes in workspace. 
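
Putting the trigger models above together: a tumbling window trigger whose runs wait on another tumbling window trigger through TumblingWindowTriggerDependencyReference. This is a sketch only; the listed classes are assumed to be exported from azure.synapse.artifacts.models, PipelineReference and TriggerPipelineReference are assumed to accept type/reference_name and pipeline_reference keywords, and the trigger and pipeline names are made up:

import datetime

from azure.synapse.artifacts.models import (
    PipelineReference,
    TriggerPipelineReference,
    TriggerReference,
    TumblingWindowTrigger,
    TumblingWindowTriggerDependencyReference,
)

trigger = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(
            type='PipelineReference',     # assumed constant discriminator
            reference_name='MyPipeline',  # hypothetical pipeline name
        ),
    ),
    frequency='Hour',
    interval=1,
    start_time=datetime.datetime(2020, 9, 1, tzinfo=datetime.timezone.utc),
    max_concurrency=10,
    depends_on=[
        TumblingWindowTriggerDependencyReference(
            reference_trigger=TriggerReference(
                type='TriggerReference', reference_name='UpstreamTrigger'),
            offset='01:00:00',  # must match the timespan pattern in _validation
            size='01:00:00',
        ),
    ],
)
assert trigger.type == 'TumblingWindowTrigger'
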
+ :type compute_subnet_id: str + """ + + _attribute_map = { + 'compute_subnet_id': {'key': 'computeSubnetId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(VirtualNetworkProfile, self).__init__(**kwargs) + self.compute_subnet_id = kwargs.get('compute_subnet_id', None) + + class WaitActivity(Activity): """This activity suspends pipeline execution for the specified interval. @@ -20072,6 +31059,47 @@ def __init__( self.type_properties = kwargs['type_properties'] +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(WebSource, self).__init__(**kwargs) + self.type = 'WebSource' # type: str + + class WebTableDataset(Dataset): """The dataset points to a HTML table in the web page. @@ -20137,64 +31165,80 @@ def __init__( self.path = kwargs.get('path', None) -class Workspace(Resource): - """Workspace resource type. +class Workspace(TrackedResource): + """A workspace. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. + :param tags: A set of tags. Resource tags. :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the workspace. - :type identity: ~azure.synapse.artifacts.models.WorkspaceIdentity - :ivar provisioning_state: Workspace provisioning state, example Succeeded. 
+ :param location: Required. The geo-location where the resource lives. + :type location: str + :param identity: Identity of the workspace. + :type identity: ~azure.synapse.artifacts.models.ManagedIdentity + :param default_data_lake_storage: Workspace default data lake storage account details. + :type default_data_lake_storage: ~azure.synapse.artifacts.models.DataLakeStorageAccountDetails + :param sql_administrator_login_password: SQL administrator login password. + :type sql_administrator_login_password: str + :param managed_resource_group_name: Workspace managed resource group. The resource group name + uniquely identifies the resource group within the user subscriptionId. The resource group name + must be no longer than 90 characters long, and must be alphanumeric characters + (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and'.'. Note that the name cannot end with '.'. + :type managed_resource_group_name: str + :ivar provisioning_state: Resource provisioning state. :vartype provisioning_state: str - :ivar create_time: Time the workspace was created in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar version: Version of the workspace. - :vartype version: str - :param default_storage: Linked service reference. - :type default_storage: ~azure.synapse.artifacts.models.LinkedServiceReference - :param default_sql_server: Linked service reference. - :type default_sql_server: ~azure.synapse.artifacts.models.LinkedServiceReference + :param sql_administrator_login: Login for workspace SQL active directory administrator. + :type sql_administrator_login: str + :param virtual_network_profile: Virtual Network profile. + :type virtual_network_profile: ~azure.synapse.artifacts.models.VirtualNetworkProfile + :param connectivity_endpoints: Connectivity endpoints. + :type connectivity_endpoints: dict[str, str] + :param managed_virtual_network: Setting this to 'default' will ensure that all compute for this + workspace is in a virtual network managed on behalf of the user. + :type managed_virtual_network: str + :param private_endpoint_connections: Private endpoint connections to the workspace. + :type private_endpoint_connections: + list[~azure.synapse.artifacts.models.PrivateEndpointConnection] + :ivar extra_properties: Workspace level configs and feature flags. 
+ :vartype extra_properties: dict[str, object] """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'location': {'required': True}, 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, + 'extra_properties': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'WorkspaceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, + 'default_data_lake_storage': {'key': 'properties.defaultDataLakeStorage', 'type': 'DataLakeStorageAccountDetails'}, + 'sql_administrator_login_password': {'key': 'properties.sqlAdministratorLoginPassword', 'type': 'str'}, + 'managed_resource_group_name': {'key': 'properties.managedResourceGroupName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'default_storage': {'key': 'properties.defaultStorage', 'type': 'LinkedServiceReference'}, - 'default_sql_server': {'key': 'properties.defaultSqlServer', 'type': 'LinkedServiceReference'}, + 'sql_administrator_login': {'key': 'properties.sqlAdministratorLogin', 'type': 'str'}, + 'virtual_network_profile': {'key': 'properties.virtualNetworkProfile', 'type': 'VirtualNetworkProfile'}, + 'connectivity_endpoints': {'key': 'properties.connectivityEndpoints', 'type': '{str}'}, + 'managed_virtual_network': {'key': 'properties.managedVirtualNetwork', 'type': 'str'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'extra_properties': {'key': 'properties.extraProperties', 'type': '{object}'}, } def __init__( @@ -20202,13 +31246,17 @@ def __init__( **kwargs ): super(Workspace, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) self.identity = kwargs.get('identity', None) + self.default_data_lake_storage = kwargs.get('default_data_lake_storage', None) + self.sql_administrator_login_password = kwargs.get('sql_administrator_login_password', None) + self.managed_resource_group_name = kwargs.get('managed_resource_group_name', None) self.provisioning_state = None - self.create_time = None - self.version = None - self.default_storage = kwargs.get('default_storage', None) - self.default_sql_server = kwargs.get('default_sql_server', None) + self.sql_administrator_login = kwargs.get('sql_administrator_login', None) + self.virtual_network_profile = kwargs.get('virtual_network_profile', None) + self.connectivity_endpoints = kwargs.get('connectivity_endpoints', None) + self.managed_virtual_network = kwargs.get('managed_virtual_network', None) + self.private_endpoint_connections = kwargs.get('private_endpoint_connections', None) + self.extra_properties = None class WorkspaceIdentity(msrest.serialization.Model): @@ -20409,6 +31457,56 @@ def __init__( self.table_name = kwargs.get('table_name', None) +class XeroSource(TabularSource): + """A copy activity Xero Service source. 
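
The reworked Workspace model above is now a tracked resource: location is required, the settable properties are plain keyword arguments, and read-only members such as provisioning_state and extra_properties stay None until the service returns them. A minimal sketch using only fields shown above, assuming Workspace and VirtualNetworkProfile are exported from azure.synapse.artifacts.models; the subnet ID, tag and login values are placeholders:

from azure.synapse.artifacts.models import VirtualNetworkProfile, Workspace

workspace = Workspace(
    location='eastus2',
    tags={'env': 'dev'},
    sql_administrator_login='sqladminuser',
    managed_virtual_network='default',  # opt the workspace into a managed VNet
    virtual_network_profile=VirtualNetworkProfile(
        compute_subnet_id='/subscriptions/.../subnets/compute',  # placeholder ID
    ),
)
assert workspace.provisioning_state is None  # populated only by the server
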
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(XeroSource, self).__init__(**kwargs) + self.type = 'XeroSource' # type: str + self.query = kwargs.get('query', None) + + class ZohoLinkedService(LinkedService): """Zoho server linked service. @@ -20537,3 +31635,53 @@ def __init__( super(ZohoObjectDataset, self).__init__(**kwargs) self.type = 'ZohoObject' # type: str self.table_name = kwargs.get('table_name', None) + + +class ZohoSource(TabularSource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ZohoSource, self).__init__(**kwargs) + self.type = 'ZohoSource' # type: str + self.query = kwargs.get('query', None) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py index 97879f461074..429ae1a8680d 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py @@ -490,7 +490,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
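
The _subtype_map in the next hunk is what lets msrest route a raw dataset payload to the right subclass: the 'type' discriminator is looked up in the map and the matching model is instantiated, and the same mechanism covers the newly listed CustomDataset. A hedged sketch, assuming the standard msrest Model.deserialize entry point and that the models are exported from azure.synapse.artifacts.models:

from azure.synapse.artifacts.models import Dataset, SquareObjectDataset

# REST-shaped payload; the linked service name is illustrative.
payload = {
    'type': 'SquareObject',
    'linkedServiceName': {
        'referenceName': 'MySquareLinkedService',
        'type': 'LinkedServiceReference',
    },
}

dataset = Dataset.deserialize(payload)  # discriminator selects the subclass
assert isinstance(dataset, SquareObjectDataset)
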
@@ -536,7 +536,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -632,6 +632,179 @@ def __init__( self.table_name = table_name +class CopySource(msrest.serialization.Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, TabularSource, WebSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: 
Optional[object] = None, + **kwargs + ): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'CopySource' # type: str + self.source_retry_count = source_retry_count + self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections + + +class TabularSource(CopySource): + """Copy activity sources of tabular type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonMWSSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAXSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMISource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonMWSSource': 'AmazonMWSSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAXSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + **kwargs + ): + super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'TabularSource' # type: str + self.query_timeout = query_timeout + + +class AmazonMWSSource(TabularSource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AmazonMWSSource' # type: str + self.query = query + + class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. @@ -717,6 +890,69 @@ def __init__( self.encrypted_credential = encrypted_credential +class AmazonRedshiftSource(TabularSource): + """A copy activity source for Amazon Redshift Source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. 
Type: string (or Expression with resultType string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be + unloaded into S3 first and then copied into the targeted sink from the interim S3. + :type redshift_unload_settings: ~azure.synapse.artifacts.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, + **kwargs + ): + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AmazonRedshiftSource' # type: str + self.query = query + self.redshift_unload_settings = redshift_unload_settings + + class AmazonRedshiftTableDataset(Dataset): """The Amazon Redshift table dataset. @@ -871,946 +1107,784 @@ def __init__( self.encrypted_credential = encrypted_credential -class AppendVariableActivity(Activity): - """Append value for a Variable of type Array. +class DatasetLocation(msrest.serialization.Model): + """Dataset location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3Location, AzureBlobFSLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, SftpLocation. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression. 
- :type value: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation'} } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - variable_name: Optional[str] = None, - value: Optional[object] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, **kwargs ): - super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'AppendVariable' # type: str - self.variable_name = variable_name - self.value = value + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'DatasetLocation' # type: str + self.folder_path = folder_path + self.file_name = file_name -class AvroDataset(Dataset): - """Avro dataset. +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the avro storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + resultType string). + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or Expression with resultType + string). + :type version: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, - avro_compression_level: Optional[int] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + bucket_name: Optional[object] = None, + version: Optional[object] = None, **kwargs ): - super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, 
parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Avro' # type: str - self.location = location - self.avro_compression_codec = avro_compression_codec - self.avro_compression_level = avro_compression_level + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AmazonS3Location' # type: str + self.bucket_name = bucket_name + self.version = version -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. +class StoreReadSettings(msrest.serialization.Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3ReadSettings, AzureBlobFSReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, SftpReadSettings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Batch account name. Type: string (or Expression with - resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.synapse.artifacts.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType - string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with - resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings'} } def __init__( self, *, - account_name: object, - batch_uri: object, - pool_name: object, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - access_key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, **kwargs ): - super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureBatch' # type: str - self.account_name = account_name - self.access_key = access_key - self.batch_uri = batch_uri - self.pool_name = pool_name - self.linked_service_name = linked_service_name - self.encrypted_credential = encrypted_credential + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'StoreReadSettings' # type: str + self.max_concurrent_connections = max_concurrent_connections -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. +class AmazonS3ReadSettings(StoreReadSettings): + """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
+ :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - account_key: Optional[object] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureBlobFS' # type: str - self.url = url - self.account_key = account_key - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AmazonS3ReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureBlobStorageLinkedService(LinkedService): - """The azure 
blob storage linked service. +class AppendVariableActivity(Activity): + """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri, - serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with - connectionString, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is - mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param variable_name: Name of the variable whose value needs to be appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression. 
+ :type value: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__( self, *, + name: str, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, - sas_token: Optional["AzureKeyVaultSecretReference"] = None, - service_endpoint: Optional[str] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + variable_name: Optional[str] = None, + value: Optional[object] = None, **kwargs ): - super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureBlobStorage' # type: str - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.service_endpoint = service_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'AppendVariable' # type: str + self.variable_name = variable_name + self.value = value -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. 
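For context on how the keyword-only constructors in this hunk are meant to be used, a small self-contained sketch that builds two of the models shown above; the argument names come straight from the generated signatures, while the values (file pattern, variable name, expression string) are illustrative only:

from azure.synapse.artifacts.models import AmazonS3ReadSettings, AppendVariableActivity

# Store read settings for an S3-backed dataset: every parameter is optional and most
# are typed as 'object' so either a literal or an Expression can be supplied.
read_settings = AmazonS3ReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    enable_partition_discovery=False,
)

# Append-variable activity: 'name' is the only required parameter; 'value' may be a
# static value or an Expression evaluated at run time.
append_run_id = AppendVariableActivity(
    name="AppendRunId",
    variable_name="runIds",
    value="@pipeline().RunId",
)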
+class AutoPauseProperties(msrest.serialization.Model): + """Auto-pausing properties of a Big Data pool powered by Apache Spark. + + :param delay_in_minutes: Number of minutes of idle time before the Big Data pool is + automatically paused. + :type delay_in_minutes: int + :param enabled: Whether auto-pausing is enabled for the Big Data pool. + :type enabled: bool + """ + + _attribute_map = { + 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + delay_in_minutes: Optional[int] = None, + enabled: Optional[bool] = None, + **kwargs + ): + super(AutoPauseProperties, self).__init__(**kwargs) + self.delay_in_minutes = delay_in_minutes + self.enabled = enabled + + +class AutoScaleProperties(msrest.serialization.Model): + """Auto-scaling properties of a Big Data pool powered by Apache Spark. + + :param min_node_count: The minimum number of nodes the Big Data pool can support. + :type min_node_count: int + :param enabled: Whether automatic scaling is enabled for the Big Data pool. + :type enabled: bool + :param max_node_count: The maximum number of nodes the Big Data pool can support. + :type max_node_count: int + """ + + _attribute_map = { + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + } + + def __init__( + self, + *, + min_node_count: Optional[int] = None, + enabled: Optional[bool] = None, + max_node_count: Optional[int] = None, + **kwargs + ): + super(AutoScaleProperties, self).__init__(**kwargs) + self.min_node_count = min_node_count + self.enabled = enabled + self.max_node_count = max_node_count + + +class AvroDataset(Dataset): + """Avro dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. 
Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType - string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the avro storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", + "bzip2". + :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } def __init__( self, *, - domain: object, - access_token: "SecretBase", + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - existing_cluster_id: Optional[object] = None, - instance_pool_id: Optional[object] = None, - new_cluster_version: Optional[object] = None, - new_cluster_num_of_worker: Optional[object] = None, - new_cluster_node_type: Optional[object] = None, - 
new_cluster_spark_conf: Optional[Dict[str, object]] = None, - new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, - new_cluster_custom_tags: Optional[Dict[str, object]] = None, - new_cluster_driver_node_type: Optional[object] = None, - new_cluster_init_scripts: Optional[object] = None, - new_cluster_enable_elastic_disk: Optional[object] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, + avro_compression_level: Optional[int] = None, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDatabricks' # type: str - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.instance_pool_id = instance_pool_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Avro' # type: str + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level -class ExecutionActivity(Activity): - """Base class for all execution activities. +class DatasetStorageFormat(msrest.serialization.Model): + """The format definition of a storage. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. + sub-classes are: AvroFormat, JsonFormat, OrcFormat, ParquetFormat, TextFormat. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'OrcFormat': 'OrcFormat', 'ParquetFormat': 'ParquetFormat', 'TextFormat': 'TextFormat'} } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, **kwargs ): - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Execution' # type: str - self.linked_service_name = linked_service_name - self.policy = policy + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'DatasetStorageFormat' # type: str + 
self.serializer = serializer + self.deserializer = deserializer -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, *, - name: str, - command: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - command_timeout: Optional[object] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureDataExplorerCommand' # type: str - self.command = command - self.command_timeout = command_timeout + super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'AvroFormat' # type: str -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class CopySink(msrest.serialization.Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( self, *, - endpoint: object, - service_principal_id: object, - service_principal_key: "SecretBase", - database: object, - tenant: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, 
description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDataExplorer' # type: str - self.endpoint = endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.database = database - self.tenant = tenant + super(CopySink, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'CopySink' # type: str + self.write_batch_size = write_batch_size + self.write_batch_timeout = write_batch_timeout + self.sink_retry_count = sink_retry_count + self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. +class AvroSink(CopySink): + """A copy activity Avro sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table: The table name of the Azure Data Explorer database. Type: string (or Expression - with resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Avro format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.AvroWriteSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["AvroWriteSettings"] = None, **kwargs ): - super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureDataExplorerTable' # type: str - self.table = table + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AvroSink' # type: str + self.store_settings = store_settings + self.format_settings = format_settings -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. +class AvroSource(CopySource): + """A copy activity Avro source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Analytics account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group name (if different from - Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with - resultType string). - :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, *, - account_name: object, - tenant: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - subscription_id: Optional[object] = None, - resource_group_name: Optional[object] = None, - data_lake_analytics_uri: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDataLakeAnalytics' # type: str - self.account_name = account_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.data_lake_analytics_uri = data_lake_analytics_uri - self.encrypted_credential = encrypted_credential + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AvroSource' # type: str + self.store_settings = store_settings -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. +class FormatWriteSettings(msrest.serialization.Model): + """Format write settings. 
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'FormatWriteSettings' # type: str + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param record_name: Top level record name in write result, which is required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + record_name: Optional[str] = None, + record_namespace: Optional[str] = None, + **kwargs + ): + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AvroWriteSettings' # type: str + self.record_name = record_name + self.record_namespace = record_namespace + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. All required parameters must be populated in order to send to Azure. @@ -1827,27 +1901,19 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression - with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Store account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Store account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. 
Type: - string (or Expression with resultType string). - :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or Expression with resultType - string). + :param account_name: Required. The Azure Batch account name. Type: string (or Expression with + resultType string). :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory - account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.synapse.artifacts.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with + resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1856,7 +1922,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { @@ -1866,48 +1935,42 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - data_lake_store_uri: object, + account_name: object, + batch_uri: object, + pool_name: object, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - service_principal_id: 
Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - account_name: Optional[object] = None, - subscription_id: Optional[object] = None, - resource_group_name: Optional[object] = None, + access_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDataLakeStore' # type: str - self.data_lake_store_uri = data_lake_store_uri - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant + super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureBatch' # type: str self.account_name = account_name - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name + self.access_key = access_key + self.batch_uri = batch_uri + self.pool_name = pool_name + self.linked_service_name = linked_service_name self.encrypted_credential = encrypted_credential -class AzureFileStorageLinkedService(LinkedService): - """Azure File Storage linked service. +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. All required parameters must be populated in order to send to Azure. @@ -1924,14 +1987,21 @@ class AzureFileStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
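
For illustration only, and not part of the generated patch: a minimal sketch of constructing a few of the regenerated models whose keyword-only signatures appear in the surrounding diff (AzureBlobFSLinkedService, AzureBlobFSLocation, AvroWriteSettings, AvroSink). The endpoint, file-system, and key values below are placeholders, and the commented-out store settings would normally be a StoreWriteSettings subclass.

    # Illustrative sketch only; values are placeholders, signatures taken from this diff.
    from azure.synapse.artifacts.models import (
        AvroSink,
        AvroWriteSettings,
        AzureBlobFSLinkedService,
        AzureBlobFSLocation,
    )

    # An Azure Data Lake Storage Gen2 linked service using account-key authentication.
    storage = AzureBlobFSLinkedService(
        url="https://contoso.dfs.core.windows.net",  # placeholder endpoint
        account_key="<account-key>",                 # placeholder secret
    )

    # A file location inside that storage account.
    location = AzureBlobFSLocation(
        file_system="data",
        folder_path="raw",
        file_name="sample.avro",
    )

    # A copy-activity Avro sink writing with an explicit record name and namespace.
    sink = AvroSink(
        store_settings=None,  # a StoreWriteSettings subclass would normally be supplied here
        format_settings=AvroWriteSettings(record_name="Row", record_namespace="contoso"),
    )
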
@@ -1940,7 +2010,7 @@ class AzureFileStorageLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -1950,247 +2020,247 @@ class AzureFileStorageLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - host: object, + url: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - user_id: Optional[object] = None, - password: Optional["SecretBase"] = None, + account_key: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureFileStorage' # type: str - self.host = host - self.user_id = user_id - self.password = password + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureBlobFS' # type: str + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant self.encrypted_credential = encrypted_credential -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", - "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). - :type function_name: object - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": - "application/json" }. Type: string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT - method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with + resultType string). + :type file_system: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, } def __init__( self, *, - name: str, - method: Union[str, "AzureFunctionActivityMethod"], - function_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - headers: Optional[object] = None, - body: Optional[object] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + file_system: Optional[object] = None, **kwargs ): - super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, 
user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureFunctionActivity' # type: str - self.method = method - self.function_name = function_name - self.headers = headers - self.body = body + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureBlobFSLocation' # type: str + self.file_system = file_system -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the - format https://:code:``.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'function_app_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - function_app_url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - function_key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureFunction' # type: str - self.function_app_url = function_app_url - self.function_key = function_key - self.encrypted_credential = encrypted_credential + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobFSReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. 
:type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'base_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - base_url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureKeyVault' # type: str - self.base_url = base_url - + super(AzureBlobFSSink, 
self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobFSSink' # type: str + self.copy_behavior = copy_behavior -class SecretBase(msrest.serialization.Model): - """The base definition of a secret type. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureKeyVaultSecretReference, SecureString. +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). 
+ :type recursive: object """ _validation = { @@ -2198,93 +2268,103 @@ class SecretBase(msrest.serialization.Model): } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + treat_empty_as_null: Optional[object] = None, + skip_header_line_count: Optional[object] = None, + recursive: Optional[object] = None, **kwargs ): - super(SecretBase, self).__init__(**kwargs) - self.type = None # type: Optional[str] + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobFSSource' # type: str + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. +class StoreWriteSettings(msrest.serialization.Model): + """Connector write settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.synapse.artifacts.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or - Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The default value is the - latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
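# --- Editor's illustrative sketch (not part of the generated patch) ---
# The AzureBlobFSSink and AzureBlobFSSource models above are normally paired inside a copy
# activity; the property values below are hypothetical and copy_behavior is an opaque object
# in the model definition.
from azure.synapse.artifacts.models import AzureBlobFSSink, AzureBlobFSSource

source = AzureBlobFSSource(recursive=True, skip_header_line_count=1)
sink = AzureBlobFSSink(copy_behavior="PreserveHierarchy")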
+ :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( self, *, - store: "LinkedServiceReference", - secret_name: object, - secret_version: Optional[object] = None, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type = 'AzureKeyVaultSecret' # type: str - self.store = store - self.secret_name = secret_name - self.secret_version = secret_version + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'StoreWriteSettings' # type: str + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). + :type block_size_in_mb: object """ _validation = { @@ -2294,370 +2374,298 @@ class AzureMariaDBLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureMariaDB' # type: str - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureBlobFSWriteSettings' # type: str + self.block_size_in_mb = block_size_in_mb -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param connection_string: The connection string. It is mutually exclusive with sasUri, + serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + connectionString, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Data Warehouse. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
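# --- Editor's illustrative sketch (not part of the generated patch) ---
# A hedged sketch of the AzureBlobStorageLinkedService whose parameters are listed above.
# Per the docstring, connection_string, sas_uri and service_endpoint are mutually exclusive;
# the connection string and description below are hypothetical placeholders.
from azure.synapse.artifacts.models import AzureBlobStorageLinkedService

blob_ls = AzureBlobStorageLinkedService(
    connection_string="DefaultEndpointsProtocol=https;AccountName=examplestorage;",
    description="Blob storage linked service built from the regenerated models",
)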
+ :type encrypted_credential: str """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + connection_string: Optional[object] = None, + account_key: Optional["AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[object] = None, + sas_token: Optional["AzureKeyVaultSecretReference"] = None, + service_endpoint: Optional[str] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[str] = None, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureMariaDBTable' # type: str - self.table_name = table_name + super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureBlobStorage' # type: str + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.service_endpoint = service_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML 
Batch Execution activity. +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service - endpoint. Keys must match the names of web service parameters defined in the published Azure ML - web service. Values will be passed in the GlobalParameters property of the Azure ML batch - execution request. - :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This - information will be passed in the WebServiceOutputs property of the Azure ML batch execution - request. - :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This - information will be passed in the WebServiceInputs property of the Azure ML batch execution - request. - :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or Expression with + resultType string). 
+ :type container: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - global_parameters: Optional[Dict[str, object]] = None, - web_service_outputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, - web_service_inputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + container: Optional[object] = None, **kwargs ): - super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureMLBatchExecution' # type: str - self.global_parameters = global_parameters - self.web_service_outputs = web_service_outputs - self.web_service_inputs = web_service_inputs + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureBlobStorageLocation' # type: str + self.container = container -class AzureMLExecutePipelineActivity(ExecutionActivity): - """Azure ML Execute Pipeline activity. +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. 
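# --- Editor's illustrative sketch (not part of the generated patch) ---
# The AzureBlobStorageLocation model above supplies the container/folder/file triple of a
# blob dataset; the container and path values are hypothetical.
from azure.synapse.artifacts.models import AzureBlobStorageLocation

location = AzureBlobStorageLocation(
    container="raw",
    folder_path="sales/2020",
    file_name="orders.csv",
)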
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). - :type ml_pipeline_id: object - :param experiment_name: Run history experiment name of the pipeline run. This information will - be passed in the ExperimentName property of the published pipeline execution request. Type: - string (or Expression with resultType string). - :type experiment_name: object - :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline - endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. - Values will be passed in the ParameterAssignments property of the published pipeline execution - request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: object - :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be - passed in the ParentRunId property of the published pipeline execution request. Type: string - (or Expression with resultType string). - :type ml_parent_run_id: object - :param continue_on_step_failure: Whether to continue execution of other steps in the - PipelineRun if a step fails. This information will be passed in the continueOnStepFailure - property of the published pipeline execution request. Type: boolean (or Expression with - resultType boolean). - :type continue_on_step_failure: object + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, - 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, - 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, - 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, - 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - name: str, - ml_pipeline_id: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - experiment_name: Optional[object] = None, - ml_pipeline_parameters: Optional[object] = None, - ml_parent_run_id: Optional[object] = None, - continue_on_step_failure: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureMLExecutePipeline' # type: str - self.ml_pipeline_id = ml_pipeline_id - self.experiment_name = experiment_name - self.ml_pipeline_parameters = ml_pipeline_parameters - self.ml_parent_run_id = ml_parent_run_id - self.continue_on_step_failure = continue_on_step_failure + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = 
prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureMLLinkedService(LinkedService): - """Azure ML Studio Web Service linked service. +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service - endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~azure.synapse.artifacts.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web - Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression - with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, *, - ml_endpoint: object, - api_key: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - update_resource_endpoint: Optional[object] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureML' # type: str - self.ml_endpoint = ml_endpoint - self.api_key = api_key - self.update_resource_endpoint = update_resource_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureBlobStorageWriteSettings' # type: str + self.block_size_in_mb = block_size_in_mb -class AzureMLServiceLinkedService(LinkedService): - """Azure ML Service linked service. +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. All required parameters must be populated in order to send to Azure. @@ -2674,25 +2682,55 @@ class AzureMLServiceLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param subscription_id: Required. Azure ML Service workspace subscription ID. 
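# --- Editor's illustrative sketch (not part of the generated patch) ---
# Both write-settings models above derive from StoreWriteSettings; a hedged example of the
# blob storage variant with a hypothetical 8 MB block size. copy_behavior is an opaque
# object in the model definition.
from azure.synapse.artifacts.models import AzureBlobStorageWriteSettings

write_settings = AzureBlobStorageWriteSettings(
    copy_behavior="FlattenHierarchy",
    block_size_in_mb=8,
)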
Type: string (or - Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: - string (or Expression with resultType string). - :type resource_group_name: object - :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :type ml_workspace_name: object - :param service_principal_id: The ID of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. 
This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2701,9 +2739,8 @@ class AzureMLServiceLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'subscription_id': {'required': True}, - 'resource_group_name': {'required': True}, - 'ml_workspace_name': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, } _attribute_map = { @@ -2713,45 +2750,69 @@ class AzureMLServiceLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - subscription_id: object, - resource_group_name: object, 
- ml_workspace_name: object, + domain: object, + access_token: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, + existing_cluster_id: Optional[object] = None, + instance_pool_id: Optional[object] = None, + new_cluster_version: Optional[object] = None, + new_cluster_num_of_worker: Optional[object] = None, + new_cluster_node_type: Optional[object] = None, + new_cluster_spark_conf: Optional[Dict[str, object]] = None, + new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, + new_cluster_custom_tags: Optional[Dict[str, object]] = None, + new_cluster_driver_node_type: Optional[object] = None, + new_cluster_init_scripts: Optional[object] = None, + new_cluster_enable_elastic_disk: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureMLService' # type: str - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.ml_workspace_name = ml_workspace_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricks' # type: str + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.instance_pool_id = instance_pool_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. 
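# --- Editor's illustrative sketch (not part of the generated patch) ---
# A hedged sketch of the AzureDatabricksLinkedService defined above, configured for a new
# job cluster. The domain, token and cluster sizing values are hypothetical; SecureString is
# the SecretBase subclass referenced elsewhere in these models and is assumed to accept a
# value keyword argument.
from azure.synapse.artifacts.models import AzureDatabricksLinkedService, SecureString

dbx_ls = AzureDatabricksLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",
    access_token=SecureString(value="<personal-access-token>"),
    new_cluster_version="6.4.x-scala2.11",
    new_cluster_num_of_worker="1:4",        # autoscale from 1 to 4 workers
    new_cluster_node_type="Standard_DS3_v2",
)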
All required parameters must be populated in order to send to Azure. @@ -2772,24 +2833,11 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in the Web Service - experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). - :type trained_model_file_path: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, } _attribute_map = { @@ -2801,18 +2849,16 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} } def __init__( self, *, name: str, - trained_model_name: object, - trained_model_linked_service_name: "LinkedServiceReference", - trained_model_file_path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, @@ -2821,50 +2867,83 @@ def __init__( policy: Optional["ActivityPolicy"] = None, **kwargs ): - super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, 
description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureMLUpdateResource' # type: str - self.trained_model_name = trained_model_name - self.trained_model_linked_service_name = trained_model_linked_service_name - self.trained_model_file_path = trained_model_file_path + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = linked_service_name + self.policy = policy -class AzureMLWebServiceFile(msrest.serialization.Model): - """Azure ML WebService Input/Output file. +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. All required parameters must be populated in order to send to Azure. - :param file_path: Required. The relative file path, including container name, in the Azure Blob - Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure - ML WebService Input/Output file located. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). 
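# --- Editor's illustrative sketch (not part of the generated patch) ---
# A hedged sketch of the AzureDataExplorerCommandActivity whose parameters are listed above.
# The activity name, control command and linked service reference are hypothetical, and the
# LinkedServiceReference keyword arguments are assumed rather than shown in this patch.
from azure.synapse.artifacts.models import (
    AzureDataExplorerCommandActivity,
    LinkedServiceReference,
)

adx_command = AzureDataExplorerCommandActivity(
    name="RunControlCommand",
    command=".show database schema",
    command_timeout="00:20:00",
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyAzureDataExplorer"
    ),
)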
+ :type command_timeout: object """ _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, } _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, } def __init__( self, *, - file_path: object, - linked_service_name: "LinkedServiceReference", + name: str, + command: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + command_timeout: Optional[object] = None, **kwargs ): - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = file_path - self.linked_service_name = linked_service_name + super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = command + self.command_timeout = command_timeout -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. All required parameters must be populated in order to send to Azure. @@ -2881,20 +2960,31 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. 
The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, } _attribute_map = { @@ -2904,33 +2994,178 @@ class AzureMySqlLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, } def __init__( self, *, - connection_string: object, + endpoint: object, + service_principal_id: object, + service_principal_key: "SecretBase", + database: object, + tenant: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureMySql' # type: str - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. 
+ :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description provided in a json + format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. + Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + ingestion_mapping_name: Optional[object] = None, + ingestion_mapping_as_json: Optional[object] = None, + flush_immediately: Optional[object] = None, + **kwargs + ): + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataExplorerSink' # type: str + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: + string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether truncation is + applied to result-sets that go beyond a certain row-count limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + query: object, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + no_truncation: Optional[object] = None, + query_timeout: Optional[object] = None, + **kwargs + ): + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataExplorerSource' # type: str + self.query = query + self.no_truncation = no_truncation + self.query_timeout = query_timeout + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. All required parameters must be populated in order to send to Azure. @@ -2956,11 +3191,8 @@ class AzureMySqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Azure MySQL database table name. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The name of Azure MySQL database table. Type: string (or Expression with - resultType string). + :param table: The table name of the Azure Data Explorer database. Type: string (or Expression + with resultType string). 
:type table: object """ @@ -2979,7 +3211,6 @@ class AzureMySqlTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, } @@ -2994,18 +3225,16 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, table: Optional[object] = None, **kwargs ): - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureMySqlTable' # type: str - self.table_name = table_name + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDataExplorerTable' # type: str self.table = table -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. All required parameters must be populated in order to send to Azure. @@ -3022,11 +3251,27 @@ class AzurePostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Analytics account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Analytics account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + :param subscription_id: Data Lake Analytics account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Analytics account resource group name (if different from + Data Factory account). Type: string (or Expression with resultType string). + :type resource_group_name: object + :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with + resultType string). 
+ :type data_lake_analytics_uri: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -3035,6 +3280,8 @@ class AzurePostgreSqlLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, } _attribute_map = { @@ -3044,775 +3291,577 @@ class AzurePostgreSqlLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + account_name: object, + tenant: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + subscription_id: Optional[object] = None, + resource_group_name: Optional[object] = None, + data_lake_analytics_uri: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzurePostgreSql' # type: str - self.connection_string = connection_string - self.password = password + super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDataLakeAnalytics' # type: str + self.account_name = account_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.data_lake_analytics_uri = data_lake_analytics_uri self.encrypted_credential = encrypted_credential -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Azure PostgreSQL database which includes both schema - and table. Type: string (or Expression with resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with - resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression + with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Store account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Store account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or Expression with resultType + string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory + account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type resource_group_name: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. 
Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'data_lake_store_uri': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + data_lake_store_uri: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + account_name: Optional[object] = None, + subscription_id: Optional[object] = None, + resource_group_name: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzurePostgreSqlTable' # type: str - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema + super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDataLakeStore' # type: str + self.data_lake_store_uri = data_lake_store_uri + self.service_principal_id = service_principal_id + self.service_principal_key = 
service_principal_key + self.tenant = tenant + self.account_name = account_name + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.encrypted_credential = encrypted_credential -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression - with resultType string). - :type index_name: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
+ :type file_name: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - index_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, **kwargs ): - super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSearchIndex' # type: str - self.index_name = index_name + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureDataLakeStoreLocation' # type: str -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType - string). - :type url: object - :param key: Admin Key for Azure Search service. - :type key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSearch' # type: str - self.url = url - self.key = key - self.encrypted_credential = encrypted_credential + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 
'AzureDataLakeStoreReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Database. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. + :type enable_adls_single_file_parallel: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + enable_adls_single_file_parallel: Optional[object] = None, **kwargs ): - super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSqlDatabase' # type: str - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataLakeStoreSink' # type: str + self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel 
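As a quick orientation to the regenerated surface, here is a minimal usage sketch of two of the models added in the hunks above. It is illustrative only and not part of the generated patch: it assumes the azure-synapse-artifacts package produced by this change is importable and that these classes are exported from azure.synapse.artifacts.models as the docstrings indicate; the activity name, KQL command, query, and timeout strings are placeholder values.

    # Illustrative sketch only -- not part of the regenerated code.
    # Assumes the azure-synapse-artifacts package built from this patch is installed
    # and that the models below are re-exported from azure.synapse.artifacts.models.
    from azure.synapse.artifacts.models import (
        AzureDataExplorerCommandActivity,
        AzureDataExplorerSource,
    )

    # The regenerated constructors are keyword-only: required parameters
    # ('name', 'command') are plain keywords, everything else defaults to None.
    activity = AzureDataExplorerCommandActivity(
        name="purge-old-rows",                    # placeholder activity name
        command=".drop extents older than 30d",   # placeholder Kusto control command
        command_timeout="00:20:00",               # placeholder; follows the ((\d+).)?(\d\d):... pattern
    )

    # Copy sources follow the same pattern; 'query' is the only required argument.
    source = AzureDataExplorerSource(
        query="StormEvents | take 10",            # placeholder KQL query
        query_timeout="00:05:00",
        no_truncation=True,
    )

    # msrest models expose serialize(); per the _attribute_map entries above,
    # 'command' and 'command_timeout' are flattened back under 'typeProperties'
    # in the resulting payload, and the 'type' discriminator is filled in.
    payload = activity.serialize()

The same keyword-only construction applies to the other regenerated models in this diff (linked services, datasets, sinks, and read/write settings), with required parameters listed first in each __init__ signature.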
-class AzureSqlDWLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). 
+ :type recursive: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, **kwargs ): - super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSqlDW' # type: str - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataLakeStoreSource' # type: str + self.recursive = recursive -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with - resultType string). - :type table: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, 
linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSqlDWTable' # type: str - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' # type: str -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. +class Resource(msrest.serialization.Model): + """Resource. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Managed Instance. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Managed Instance. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. 
+ :vartype type: str """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, - *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureSqlMI' # type: str - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. +class AzureEntityResource(Resource): + """The resource model definition for a Azure Resource Manager resource with an etag. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or - Expression with resultType string). - :type table: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, } def __init__( self, - *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, **kwargs ): - super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSqlMITable' # type: str - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + 
super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. +class AzureFileStorageLinkedService(LinkedService): + """Azure File Storage linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string - (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL database. Type: string (or Expression with + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type table: object + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'host': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + host: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + user_id: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureSqlTable' # type: str - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureFileStorage' # type: str + self.host = host + self.user_id = user_id + self.password = password + self.encrypted_credential = encrypted_credential -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. +class AzureFileStorageLocation(DatasetLocation): + """The location of file server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { @@ -3822,113 +3871,178 @@ class AzureStorageLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, - sas_token: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[str] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, **kwargs ): - super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureStorage' # type: str - 
self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential + super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureFileStorageLocation' # type: str -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. +class AzureFileStorageReadSettings(StoreReadSettings): + """Azure File Storage read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Dataset description. + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression + with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureFileStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. 
The table name of the Azure Table storage. Type: string (or - Expression with resultType string). - :type table_name: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod + :param function_name: Required. Name of the Function that the Azure Function Activity will + call. Type: string (or Expression with resultType string). + :type function_name: object + :param headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :type body: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - table_name: object, + name: str, + method: Union[str, "AzureFunctionActivityMethod"], + function_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + headers: Optional[object] = None, + body: Optional[object] = None, **kwargs ): - super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, 
annotations=annotations, folder=folder, **kwargs) - self.type = 'AzureTable' # type: str - self.table_name = table_name + super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureFunctionActivity' # type: str + self.method = method + self.function_name = function_name + self.headers = headers + self.body = body -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. All required parameters must be populated in order to send to Azure. @@ -3945,24 +4059,20 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + format https://:code:``.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'function_app_url': {'required': True}, } _attribute_map = { @@ -3972,146 +4082,164 @@ class AzureTableStorageLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + function_app_url: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, - sas_token: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[str] = None, + function_key: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureTableStorage' # type: str - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token + super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureFunction' # type: str + self.function_app_url = function_app_url + self.function_key = function_key self.encrypted_credential = encrypted_credential -class BigDataPoolReference(msrest.serialization.Model): - """Big data pool reference. +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. All required parameters must be populated in order to send to Azure. - :param type: Required. Big data pool reference type. Possible values include: - "BigDataPoolReference". - :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType - :param reference_name: Required. Reference big data pool name. - :type reference_name: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net Type: string (or Expression with resultType string). + :type base_url: object """ _validation = { 'type': {'required': True}, - 'reference_name': {'required': True}, + 'base_url': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, } def __init__( self, *, - type: Union[str, "BigDataPoolReferenceType"], - reference_name: str, + base_url: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, **kwargs ): - super(BigDataPoolReference, self).__init__(**kwargs) - self.type = type - self.reference_name = reference_name + super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureKeyVault' # type: str + self.base_url = base_url -class BinaryDataset(Dataset): - """Binary dataset. +class SecretBase(msrest.serialization.Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureKeyVaultSecretReference, SecureString. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of the secret.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. 
- :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the Binary storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression: The data compression method used for the binary dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + _subtype_map = { + 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} + } + + def __init__( + self, + **kwargs + ): + super(SecretBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.synapse.artifacts.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or + Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The default value is the + latest version of the secret. Type: string (or Expression with resultType string). 
+ :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - compression: Optional["DatasetCompression"] = None, + store: "LinkedServiceReference", + secret_name: object, + secret_version: Optional[object] = None, **kwargs ): - super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Binary' # type: str - self.location = location - self.compression = compression + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.type = 'AzureKeyVaultSecret' # type: str + self.store = store + self.secret_name = secret_name + self.secret_version = secret_version -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. @@ -4128,20 +4256,11 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name for connection. Type: string (or Expression with resultType - string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression with resultType - integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -4150,7 +4269,6 @@ class CassandraLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { @@ -4160,42 +4278,90 @@ class CassandraLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - host: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - port: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, + connection_string: Optional[object] = None, + pwd: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Cassandra' # type: str - self.host = host - self.authentication_type = authentication_type - self.port = port - self.username = username - self.password = password + super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureMariaDB' # type: str + self.connection_string = connection_string + self.pwd = pwd self.encrypted_credential = encrypted_credential -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. +class AzureMariaDBSource(TabularSource): + """A copy activity Azure MariaDB source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureMariaDBSource' # type: str + self.query = query + + +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. @@ -4221,12 +4387,8 @@ class CassandraTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Cassandra database. Type: string (or Expression with - resultType string). + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with - resultType string). 
- :type keyspace: object """ _validation = { @@ -4245,7 +4407,6 @@ class CassandraTableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } def __init__( @@ -4260,129 +4421,188 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, table_name: Optional[object] = None, - keyspace: Optional[object] = None, **kwargs ): - super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CassandraTable' # type: str + super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureMariaDBTable' # type: str self.table_name = table_name - self.keyspace = keyspace -class CloudError(msrest.serialization.Model): - """The object that defines the structure of an Azure Synapse error response. +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.synapse.artifacts.models.CloudError] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service + endpoint. Keys must match the names of web service parameters defined in the published Azure ML + web service. Values will be passed in the GlobalParameters property of the Azure ML batch + execution request. + :type global_parameters: dict[str, object] + :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This + information will be passed in the WebServiceOutputs property of the Azure ML batch execution + request. 
+ :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This + information will be passed in the WebServiceInputs property of the Azure ML batch execution + request. + :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + 'name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, } def __init__( self, *, - code: str, - message: str, - target: Optional[str] = None, - details: Optional[List["CloudError"]] = None, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + global_parameters: Optional[Dict[str, object]] = None, + web_service_outputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, + web_service_inputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, **kwargs ): - super(CloudError, self).__init__(**kwargs) - self.code = code - self.message = message - self.target = target - self.details = details + super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureMLBatchExecution' # type: str + self.global_parameters = global_parameters + self.web_service_outputs = web_service_outputs + self.web_service_inputs = web_service_inputs -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. +class AzureMLExecutePipelineActivity(ExecutionActivity): + """Azure ML Execute Pipeline activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. 
:type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or + Expression with resultType string). + :type ml_pipeline_id: object + :param experiment_name: Run history experiment name of the pipeline run. This information will + be passed in the ExperimentName property of the published pipeline execution request. Type: + string (or Expression with resultType string). + :type experiment_name: object + :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline + endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. + Values will be passed in the ParameterAssignments property of the published pipeline execution + request. Type: object with key value pairs (or Expression with resultType object). + :type ml_pipeline_parameters: object + :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be + passed in the ParentRunId property of the published pipeline execution request. Type: string + (or Expression with resultType string). + :type ml_parent_run_id: object + :param continue_on_step_failure: Whether to continue execution of other steps in the + PipelineRun if a step fails. This information will be passed in the continueOnStepFailure + property of the published pipeline execution request. Type: boolean (or Expression with + resultType boolean). 
+ :type continue_on_step_failure: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'ml_pipeline_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, + 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, + 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + name: str, + ml_pipeline_id: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + experiment_name: Optional[object] = None, + ml_pipeline_parameters: Optional[object] = None, + ml_parent_run_id: Optional[object] = None, + continue_on_step_failure: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CommonDataServiceForAppsEntity' # type: str - self.entity_name = entity_name + super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureMLExecutePipeline' # type: str + self.ml_pipeline_id = ml_pipeline_id + self.experiment_name = experiment_name + self.ml_pipeline_parameters = ml_pipeline_parameters + self.ml_parent_run_id = ml_parent_run_id + self.continue_on_step_failure = continue_on_step_failure -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. +class AzureMLLinkedService(LinkedService): + """Azure ML Studio Web Service linked service. 
All required parameters must be populated in order to send to Azure. @@ -4399,54 +4619,24 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Common Data Service for Apps - instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common - Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for Apps server. The - property is required for on-prem and not allowed for online. Type: string (or Expression with - resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. The property is - required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression - with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property - is required for on-line and not allowed for on-prem. Type: string (or Expression with - resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service for Apps instance. - The property is required for on-prem and required for online when there are more than one - Common Data Service for Apps instances associated with the user. Type: string (or Expression + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model endpoint. + :type api_key: ~azure.synapse.artifacts.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web + Service endpoint. Type: string (or Expression with resultType string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Common Data Service - for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Common Data Service for Apps instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. 
Type: string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_key: The key of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -4455,8 +4645,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, } _attribute_map = { @@ -4466,60 +4656,45 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 
'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + ml_endpoint: object, + api_key: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - host_name: Optional[object] = None, - port: Optional[object] = None, - service_uri: Optional[object] = None, - organization_name: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, + update_resource_endpoint: Optional[object] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, - service_principal_credential: Optional["SecretBase"] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CommonDataServiceForApps' # type: str - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_credential_type = service_principal_credential_type - self.service_principal_credential = service_principal_credential + super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureML' # type: str + self.ml_endpoint = ml_endpoint + self.api_key = api_key + self.update_resource_endpoint = update_resource_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant self.encrypted_credential = encrypted_credential -class ConcurLinkedService(LinkedService): - """Concur Service linked service. +class AzureMLServiceLinkedService(LinkedService): + """Azure ML Service linked service. All required parameters must be populated in order to send to Azure. @@ -4536,23 +4711,25 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or + Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: + string (or Expression with resultType string). + :type resource_group_name: object + :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or + Expression with resultType string). + :type ml_workspace_name: object + :param service_principal_id: The ID of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -4561,8 +4738,9 @@ class ConcurLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, + 'subscription_id': {'required': True}, + 'resource_group_name': {'required': True}, + 'ml_workspace_name': {'required': True}, } _attribute_map = { @@ -4572,294 +4750,224 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - client_id: object, - username: object, + subscription_id: object, + resource_group_name: object, + ml_workspace_name: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Concur' # type: str - self.client_id = client_id - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification + super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureMLService' # type: str + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.ml_workspace_name = ml_workspace_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant self.encrypted_credential = encrypted_credential -class ConcurObjectDataset(Dataset): - 
"""Concur Service dataset. +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param trained_model_name: Required. Name of the Trained Model module in the Web Service + experiment to be updated. Type: string (or Expression with resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage linked service + holding the .ilearner file that will be uploaded by the update operation. + :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService + to represent the .ilearner file that will be uploaded by the update operation. Type: string + (or Expression with resultType string). 
+ :type trained_model_file_path: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + name: str, + trained_model_name: object, + trained_model_linked_service_name: "LinkedServiceReference", + trained_model_file_path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, **kwargs ): - super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ConcurObject' # type: str - self.table_name = table_name + super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureMLUpdateResource' # type: str + self.trained_model_name = trained_model_name + self.trained_model_linked_service_name = trained_model_linked_service_name + self.trained_model_file_path = trained_model_file_path -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. +class AzureMLWebServiceFile(msrest.serialization.Model): + """Azure ML WebService Input/Output file. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param file_path: Required. The relative file path, including container name, in the Azure Blob + Storage specified by the LinkedService. Type: string (or Expression with resultType string). + :type file_path: object + :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure + ML WebService Input/Output file located. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, } def __init__( self, *, - name: str, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, + file_path: object, + linked_service_name: "LinkedServiceReference", **kwargs ): - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Container' # type: str + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = file_path + self.linked_service_name = linked_service_name -class CopyActivity(ExecutionActivity): - """Copy activity. +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param source: Required. Copy activity source. - :type source: ~azure.synapse.artifacts.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.synapse.artifacts.models.CopySink - :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim staging. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when EnableStaging is true. - :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to - avoid overloading the data store. Type: integer (or Expression with resultType integer), - minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units that can be used to - perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. - Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row settings when - EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - name: str, - source: "CopySource", - sink: "CopySink", + connection_string: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - inputs: Optional[List["DatasetReference"]] = None, - outputs: Optional[List["DatasetReference"]] = None, - translator: Optional[object] = None, - enable_staging: Optional[object] = None, - staging_settings: Optional["StagingSettings"] = None, - parallel_copies: Optional[object] = None, - data_integration_units: Optional[object] = None, - enable_skip_incompatible_row: Optional[object] = None, - redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, - preserve_rules: Optional[List[object]] = None, - preserve: Optional[List[object]] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: 
Optional[object] = None, **kwargs ): - super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Copy' # type: str - self.inputs = inputs - self.outputs = outputs - self.source = source - self.sink = sink - self.translator = translator - self.enable_staging = enable_staging - self.staging_settings = staging_settings - self.parallel_copies = parallel_copies - self.data_integration_units = data_integration_units - self.enable_skip_incompatible_row = enable_skip_incompatible_row - self.redirect_incompatible_row_settings = redirect_incompatible_row_settings - self.preserve_rules = preserve_rules - self.preserve = preserve - + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureMySql' # type: str + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential -class CopySink(msrest.serialization.Model): - """A copy activity sink. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -4883,6 +4991,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object """ _validation = { @@ -4897,10 +5008,7 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( @@ -4912,23 +5020,16 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, **kwargs ): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'CopySink' # type: str - self.write_batch_size = write_batch_size - self.write_batch_timeout = write_batch_timeout - self.sink_retry_count = sink_retry_count - self.sink_retry_wait = sink_retry_wait - self.max_concurrent_connections = max_concurrent_connections + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureMySqlSink' # type: str + self.pre_copy_script = pre_copy_script -class CopySource(msrest.serialization.Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . 
+class AzureMySqlSource(TabularSource): + """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -4946,6 +5047,11 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object """ _validation = { @@ -4958,10 +5064,8 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( @@ -4971,95 +5075,17 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'CopySource' # type: str - self.source_retry_count = source_retry_count - self.source_retry_wait = source_retry_wait - self.max_concurrent_connections = max_concurrent_connections - - -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or - Expression with resultType string). - :type account_endpoint: object - :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: object - :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or - AzureKeyVaultSecretReference. - :type account_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_endpoint: Optional[object] = None, - database: Optional[object] = None, - account_key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - **kwargs - ): - super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CosmosDb' # type: str - self.connection_string = connection_string - self.account_endpoint = account_endpoint - self.database = database - self.account_key = account_key - self.encrypted_credential = encrypted_credential + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureMySqlSource' # type: str + self.query = query -class CosmosDbMongoDbApiCollectionDataset(Dataset): - """The CosmosDB (MongoDB API) database dataset. +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. @@ -5085,15 +5111,17 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: - string (or Expression with resultType string). - :type collection: object + :param table_name: The Azure MySQL database table name. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The name of Azure MySQL database table. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'collection': {'required': True}, } _attribute_map = { @@ -5106,14 +5134,14 @@ class CosmosDbMongoDbApiCollectionDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -5121,15 +5149,18 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CosmosDbMongoDbApiCollection' # type: str - self.collection = collection + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureMySqlTable' # type: str + self.table_name = table_name + self.table = table -class CosmosDbMongoDbApiLinkedService(LinkedService): - """Linked service for CosmosDB (MongoDB API) data source. +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -5146,19 +5177,19 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to - access. Type: string (or Expression with resultType string). - :type database: object - """ + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, } _attribute_map = { @@ -5169,29 +5200,151 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - connection_string: object, - database: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CosmosDbMongoDbApi' # type: str + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzurePostgreSql' # type: str self.connection_string = connection_string - self.database = database + self.password = password + self.encrypted_credential = encrypted_credential -class CosmosDbSqlApiCollectionDataset(Dataset): - """Microsoft Azure CosmosDB (SQL API) Collection dataset. +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzurePostgreSqlSink' # type: str + self.pre_copy_script = pre_copy_script + + +class AzurePostgreSqlSource(TabularSource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzurePostgreSqlSource' # type: str + self.query = query + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. @@ -5217,15 +5370,20 @@ class CosmosDbSqlApiCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or - Expression with resultType string). - :type collection_name: object + :param table_name: The table name of the Azure PostgreSQL database which includes both schema + and table. Type: string (or Expression with resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with + resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). 
+ :type schema_type_properties_schema: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { @@ -5238,14 +5396,15 @@ class CosmosDbSqlApiCollectionDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -5253,40 +5412,43 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CosmosDbSqlApiCollection' # type: str - self.collection_name = collection_name + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzurePostgreSqlTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema -class CouchbaseLinkedService(LinkedService): - """Couchbase server linked service. +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object """ _validation = { @@ -5296,37 +5458,30 @@ class CouchbaseLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - cred_string: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, **kwargs ): - super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Couchbase' # type: str - self.connection_string = connection_string - self.cred_string = cred_string - self.encrypted_credential = encrypted_credential + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureQueueSink' 
# type: str -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. All required parameters must be populated in order to send to Azure. @@ -5352,13 +5507,15 @@ class CouchbaseTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression + with resultType string). + :type index_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'index_name': {'required': True}, } _attribute_map = { @@ -5371,13 +5528,14 @@ class CouchbaseTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + index_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -5385,228 +5543,143 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, **kwargs ): - super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'CouchbaseTable' # type: str - self.table_name = table_name + super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSearchIndex' # type: str + self.index_name = index_name -class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for creating data flow debug session. +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param existing_cluster_id: The ID of existing Databricks cluster. - :type existing_cluster_id: str - :param cluster_timeout: Timeout setting for Databricks cluster. - :type cluster_timeout: int - :param new_cluster_name: The name of new Databricks cluster. - :type new_cluster_name: str - :param new_cluster_node_type: The type of new Databricks cluster. - :type new_cluster_node_type: str - :param data_bricks_linked_service: Data bricks linked service. - :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource - """ + All required parameters must be populated in order to send to Azure. 
- _attribute_map = { - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, - 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, - 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, - 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, - 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, - } - - def __init__( - self, - *, - data_flow_name: Optional[str] = None, - existing_cluster_id: Optional[str] = None, - cluster_timeout: Optional[int] = None, - new_cluster_name: Optional[str] = None, - new_cluster_node_type: Optional[str] = None, - data_bricks_linked_service: Optional["LinkedServiceResource"] = None, - **kwargs - ): - super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.data_flow_name = data_flow_name - self.existing_cluster_id = existing_cluster_id - self.cluster_timeout = cluster_timeout - self.new_cluster_name = new_cluster_name - self.new_cluster_node_type = new_cluster_node_type - self.data_bricks_linked_service = data_bricks_linked_service - - -class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): - """Response body structure for creating data flow debug session. - - :param session_id: The ID of data flow debug session. - :type session_id: str - """ - - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - } - - def __init__( - self, - *, - session_id: Optional[str] = None, - **kwargs - ): - super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) - self.session_id = session_id - - -class CreateRunResponse(msrest.serialization.Model): - """Response body with a run identifier. - - All required parameters must be populated in order to send to Azure. - - :param run_id: Required. Identifier of a run. - :type run_id: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Specify the write behavior when upserting documents into Azure Search + Index. Possible values include: "Merge", "Upload". 
+ :type write_behavior: str or ~azure.synapse.artifacts.models.AzureSearchIndexWriteBehaviorType """ _validation = { - 'run_id': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__( self, *, - run_id: str, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = run_id + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureSearchIndexSink' # type: str + self.write_behavior = write_behavior -class CustomActivity(ExecutionActivity): - """Custom activity type. +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or Expression with - resultType string). - :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or Expression with resultType + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType string). - :type folder_path: object - :param reference_objects: Reference objects. - :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no restriction on the keys or - values that can be used. The user specified custom activity has the full responsibility to - consume and interpret the content defined. - :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted for custom activity. - Type: double (or Expression with resultType double). - :type retention_time_in_days: object + :type url: object + :param key: Admin Key for Azure Search service. + :type key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, + 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - name: str, - command: object, + url: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - resource_linked_service: Optional["LinkedServiceReference"] = None, - folder_path: Optional[object] = None, - reference_objects: 
Optional["CustomActivityReferenceObject"] = None, - extended_properties: Optional[Dict[str, object]] = None, - retention_time_in_days: Optional[object] = None, - **kwargs - ): - super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Custom' # type: str - self.command = command - self.resource_linked_service = resource_linked_service - self.folder_path = folder_path - self.reference_objects = reference_objects - self.extended_properties = extended_properties - self.retention_time_in_days = retention_time_in_days - - -class CustomActivityReferenceObject(msrest.serialization.Model): - """Reference objects for custom activity. - - :param linked_services: Linked service references. - :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] - """ - - _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, - } - - def __init__( - self, - *, - linked_services: Optional[List["LinkedServiceReference"]] = None, - datasets: Optional[List["DatasetReference"]] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + key: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = linked_services - self.datasets = datasets + super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSearch' # type: str + self.url = url + self.key = key + self.encrypted_credential = encrypted_credential -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. @@ -5623,13 +5696,29 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type_properties: Required. Custom linked service properties. - :type type_properties: object + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Database. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Database. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. 
Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'type_properties': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { @@ -5639,1176 +5728,1910 @@ class CustomDataSourceLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - type_properties: object, + connection_string: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - **kwargs + password: Optional["AzureKeyVaultSecretReference"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs ): - super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'CustomDataSource' # type: str - self.type_properties = type_properties + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlDatabase' # type: str + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. 
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks - Workspace. This path must begin with a slash. Type: string (or Expression with resultType - string). - :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this job.If the notebook - takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Data Warehouse. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'notebook_path': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - name: str, - notebook_path: object, + connection_string: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - base_parameters: Optional[Dict[str, object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'DatabricksNotebook' # type: str - self.notebook_path = notebook_path - self.base_parameters = base_parameters - self.libraries = libraries + super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlDW' # type: str + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class 
DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing the main method to be - executed. This class must be contained in a JAR provided as a library. Type: string (or - Expression with resultType string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'main_class_name': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, - name: str, - main_class_name: object, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - parameters: Optional[List[object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'DatabricksSparkJar' # type: str - self.main_class_name = main_class_name - self.parameters = parameters - self.libraries = libraries + super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlDWTable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
:type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. DBFS paths are - supported. Type: string (or Expression with resultType string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Managed Instance. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Managed Instance. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). 
+ :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - name: str, - python_file: object, + connection_string: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - parameters: Optional[List[object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'DatabricksSparkPython' # type: str - self.python_file = python_file - self.parameters = parameters - self.libraries = libraries - + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlMI' # type: str + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class DataFlow(msrest.serialization.Model): - """Azure Synapse nested object which contains a flow with data movements and transformations. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MappingDataFlow. +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. 
- :param type: Required. Type of data flow.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: The description of the data flow. + :param description: Dataset description. :type description: str - :param annotations: List of tags that can be used for describing the data flow. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.DataFlowFolder + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - } - - _subtype_map = { - 'type': {'MappingDataFlow': 'MappingDataFlow'} + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DataFlowFolder"] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(DataFlow, self).__init__(**kwargs) - self.type = None # type: Optional[str] - self.description = description - self.annotations = annotations - self.folder = folder + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlMITable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class DataFlowDebugCommandRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. - :param session_id: Required. The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param command_name: The command name. - :type command_name: str - :param command_payload: Required. The command payload object. - :type command_payload: object + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object """ _validation = { - 'session_id': {'required': True}, - 'command_payload': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'command_name': {'key': 'commandName', 'type': 'str'}, - 'command_payload': {'key': 'commandPayload', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - session_id: str, - command_payload: object, - data_flow_name: Optional[str] = None, - command_name: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + 
stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, **kwargs ): - super(DataFlowDebugCommandRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name - self.command_name = command_name - self.command_payload = command_payload + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureSqlSink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option -class DataFlowDebugCommandResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. +class AzureSqlSource(TabularSource): + """A copy activity Azure SQL source. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, *, - status: Optional[str] = None, - data: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + produce_additional_types: Optional[object] = None, **kwargs ): - super(DataFlowDebugCommandResponse, self).__init__(**kwargs) - self.status = status - self.data = data + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureSqlSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types -class DataFlowDebugPackage(msrest.serialization.Model): - """Request body structure for starting data flow debug session. +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow: Data flow instance. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource - :param datasets: List of datasets. - :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] - :param staging: Staging info for debug session. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param debug_settings: Data flow debug settings. - :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL database. Type: string (or Expression with + resultType string). + :type table: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - session_id: Optional[str] = None, - data_flow: Optional["DataFlowDebugResource"] = None, - datasets: Optional[List["DatasetDebugResource"]] = None, - linked_services: Optional[List["LinkedServiceDebugResource"]] = None, - staging: Optional["DataFlowStagingInfo"] = None, - debug_settings: Optional["DataFlowDebugPackageDebugSettings"] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - 
super(DataFlowDebugPackage, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.session_id = session_id - self.data_flow = data_flow - self.datasets = datasets - self.linked_services = linked_services - self.staging = staging - self.debug_settings = debug_settings + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlTable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): - """Data flow debug settings. +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, *, - source_settings: Optional[List["DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + account_key: Optional["AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[object] = None, + sas_token: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[str] = None, **kwargs ): - super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) - self.source_settings = source_settings - self.parameters = parameters - self.dataset_parameters = dataset_parameters + super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureStorage' # type: str + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential -class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): - """Request body structure for data flow preview data. +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request. - :type row_limits: int + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: Required. The table name of the Azure Table storage. Type: string (or + Expression with resultType string). + :type table_name: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, *, - session_id: Optional[str] = None, - data_flow_name: Optional[str] = None, - stream_name: Optional[str] = None, - row_limits: Optional[int] = None, - **kwargs - ): - super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name - self.stream_name = stream_name - self.row_limits = row_limits + linked_service_name: "LinkedServiceReference", + table_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureTable' # type: str + self.table_name = table_name -class DataFlowDebugQueryResponse(msrest.serialization.Model): - """Response body structure of data flow query for data preview, statistics or expression preview. +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. - :param run_id: The run ID of data flow debug session. - :type run_id: str + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: + string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or + Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + resultType string). + :type azure_table_insert_type: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } def __init__( self, *, - run_id: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + azure_table_default_partition_key_value: Optional[object] = None, + azure_table_partition_key_name: Optional[object] = None, + azure_table_row_key_name: Optional[object] = None, + azure_table_insert_type: Optional[object] = None, **kwargs ): - super(DataFlowDebugQueryResponse, self).__init__(**kwargs) - self.run_id = run_id + super(AzureTableSink, self).__init__(additional_properties=additional_properties, 
write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureTableSink' # type: str + self.azure_table_default_partition_key_value = azure_table_default_partition_key_value + self.azure_table_partition_key_name = azure_table_partition_key_name + self.azure_table_row_key_name = azure_table_row_key_name + self.azure_table_insert_type = azure_table_insert_type -class SubResourceDebugResource(msrest.serialization.Model): - """Azure Synapse nested debug resource. +class AzureTableSource(TabularSource): + """A copy activity Azure Table source. - :param name: The resource name. - :type name: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param azure_table_source_query: Azure Table source query. Type: string (or Expression with + resultType string). + :type azure_table_source_query: object + :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + Type: boolean (or Expression with resultType boolean). 
+ :type azure_table_source_ignore_table_not_found: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, + 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } def __init__( self, *, - name: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + azure_table_source_query: Optional[object] = None, + azure_table_source_ignore_table_not_found: Optional[object] = None, **kwargs ): - super(SubResourceDebugResource, self).__init__(**kwargs) - self.name = name + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureTableSource' # type: str + self.azure_table_source_query = azure_table_source_query + self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found -class DataFlowDebugResource(SubResourceDebugResource): - """Data flow debug resource. +class AzureTableStorageLinkedService(LinkedService): + """The azure table storage linked service. All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. 
+ :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str """ _validation = { - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, *, - properties: "DataFlow", - name: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + account_key: Optional["AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[object] = None, + sas_token: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[str] = None, **kwargs ): - super(DataFlowDebugResource, self).__init__(name=name, **kwargs) - self.properties = properties + super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureTableStorage' # type: str + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential -class DataFlowDebugResultResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. +class BigDataPoolReference(msrest.serialization.Model): + """Big data pool reference. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str + All required parameters must be populated in order to send to Azure. + + :param type: Required. Big data pool reference type. Possible values include: + "BigDataPoolReference". + :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType + :param reference_name: Required. Reference big data pool name. 
+ :type reference_name: str """ + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, } def __init__( self, *, - status: Optional[str] = None, - data: Optional[str] = None, + type: Union[str, "BigDataPoolReferenceType"], + reference_name: str, **kwargs ): - super(DataFlowDebugResultResponse, self).__init__(**kwargs) - self.status = status - self.data = data + super(BigDataPoolReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name -class DataFlowDebugSessionInfo(msrest.serialization.Model): - """Data flow debug session info. +class TrackedResource(Resource): + """The resource model definition for a ARM tracked top level resource. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param compute_type: Compute type of the cluster. - :type compute_type: str - :param core_count: Core count of the cluster. - :type core_count: int - :param node_count: Node count of the cluster. (deprecated property). - :type node_count: int - :param integration_runtime_name: Attached integration runtime name of data flow debug session. - :type integration_runtime_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param start_time: Start time of data flow debug session. - :type start_time: str - :param time_to_live_in_minutes: Compute type of the cluster. - :type time_to_live_in_minutes: int - :param last_activity_time: Last activity time of data flow debug session. - :type last_activity_time: str + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. 
+ :type location: str """ + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, - 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - data_flow_name: Optional[str] = None, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - node_count: Optional[int] = None, - integration_runtime_name: Optional[str] = None, - session_id: Optional[str] = None, - start_time: Optional[str] = None, - time_to_live_in_minutes: Optional[int] = None, - last_activity_time: Optional[str] = None, + location: str, + tags: Optional[Dict[str, str]] = None, **kwargs ): - super(DataFlowDebugSessionInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.data_flow_name = data_flow_name - self.compute_type = compute_type - self.core_count = core_count - self.node_count = node_count - self.integration_runtime_name = integration_runtime_name - self.session_id = session_id - self.start_time = start_time - self.time_to_live_in_minutes = time_to_live_in_minutes - self.last_activity_time = last_activity_time + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location -class DataFlowDebugStatisticsRequest(msrest.serialization.Model): - """Request body structure for data flow statistics. +class BigDataPoolResourceInfo(TrackedResource): + """A Big Data pool. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param columns: List of column names. - :type columns: list[str] + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param provisioning_state: The state of the Big Data pool. + :type provisioning_state: str + :param auto_scale: Auto-scaling properties. 
+ :type auto_scale: ~azure.synapse.artifacts.models.AutoScaleProperties + :param creation_date: The time when the Big Data pool was created. + :type creation_date: ~datetime.datetime + :param auto_pause: Auto-pausing properties. + :type auto_pause: ~azure.synapse.artifacts.models.AutoPauseProperties + :param is_compute_isolation_enabled: Whether compute isolation is required or not. + :type is_compute_isolation_enabled: bool + :param spark_events_folder: The Spark events folder. + :type spark_events_folder: str + :param node_count: The number of nodes in the Big Data pool. + :type node_count: int + :param library_requirements: Library version requirements. + :type library_requirements: ~azure.synapse.artifacts.models.LibraryRequirements + :param spark_version: The Apache Spark version. + :type spark_version: str + :param default_spark_log_folder: The default folder where Spark logs will be written. + :type default_spark_log_folder: str + :param node_size: The level of compute power that each node in the Big Data pool has. Possible + values include: "None", "Small", "Medium", "Large", "XLarge", "XXLarge". + :type node_size: str or ~azure.synapse.artifacts.models.NodeSize + :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values + include: "None", "MemoryOptimized". + :type node_size_family: str or ~azure.synapse.artifacts.models.NodeSizeFamily """ + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'columns': {'key': 'columns', 'type': '[str]'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'auto_scale': {'key': 'properties.autoScale', 'type': 'AutoScaleProperties'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, + 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, + 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, + 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, + 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, + 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, + 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, + 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, + 'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'}, } def __init__( self, *, - session_id: Optional[str] = None, - data_flow_name: Optional[str] = None, - stream_name: Optional[str] = None, - columns: Optional[List[str]] = None, - **kwargs - ): - super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name - self.stream_name = stream_name - self.columns = columns + location: str, + tags: Optional[Dict[str, str]] = None, + provisioning_state: Optional[str] = None, + auto_scale: Optional["AutoScaleProperties"] = None, + creation_date: Optional[datetime.datetime] = None, 
+ auto_pause: Optional["AutoPauseProperties"] = None, + is_compute_isolation_enabled: Optional[bool] = None, + spark_events_folder: Optional[str] = None, + node_count: Optional[int] = None, + library_requirements: Optional["LibraryRequirements"] = None, + spark_version: Optional[str] = None, + default_spark_log_folder: Optional[str] = None, + node_size: Optional[Union[str, "NodeSize"]] = None, + node_size_family: Optional[Union[str, "NodeSizeFamily"]] = None, + **kwargs + ): + super(BigDataPoolResourceInfo, self).__init__(tags=tags, location=location, **kwargs) + self.provisioning_state = provisioning_state + self.auto_scale = auto_scale + self.creation_date = creation_date + self.auto_pause = auto_pause + self.is_compute_isolation_enabled = is_compute_isolation_enabled + self.spark_events_folder = spark_events_folder + self.node_count = node_count + self.library_requirements = library_requirements + self.spark_version = spark_version + self.default_spark_log_folder = default_spark_log_folder + self.node_size = node_size + self.node_size_family = node_size_family -class DataFlowFolder(msrest.serialization.Model): - """The folder that this data flow is in. If not specified, Data flow will appear at the root level. +class BigDataPoolResourceInfoListResult(msrest.serialization.Model): + """Collection of Big Data pool information. - :param name: The name of the folder that this data flow is in. - :type name: str + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of Big Data pools. + :type value: list[~azure.synapse.artifacts.models.BigDataPoolResourceInfo] """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[BigDataPoolResourceInfo]'}, } def __init__( self, *, - name: Optional[str] = None, + next_link: Optional[str] = None, + value: Optional[List["BigDataPoolResourceInfo"]] = None, **kwargs ): - super(DataFlowFolder, self).__init__(**kwargs) - self.name = name + super(BigDataPoolResourceInfoListResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value -class DataFlowListResponse(msrest.serialization.Model): - """A list of data flow resources. +class BinaryDataset(Dataset): + """Binary dataset. All required parameters must be populated in order to send to Azure. - :param value: Required. List of data flows. - :type value: list[~azure.synapse.artifacts.models.DataFlowResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the Binary storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression: The data compression method used for the binary dataset. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__( self, *, - value: List["DataFlowResource"], - next_link: Optional[str] = None, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + compression: Optional["DatasetCompression"] = None, **kwargs ): - super(DataFlowListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Binary' # type: str + self.location = location + self.compression = compression -class DataFlowReference(msrest.serialization.Model): - """Data flow reference type. +class BinarySink(CopySink): + """A copy activity Binary sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". - :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType - :param reference_name: Required. Reference data flow name. - :type reference_name: str - :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: object + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings """ _validation = { 'type': {'required': True}, - 'reference_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, *, - type: Union[str, "DataFlowReferenceType"], - reference_name: str, additional_properties: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(DataFlowReference, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.reference_name = reference_name - self.dataset_parameters = dataset_parameters + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BinarySink' # type: str + self.store_settings = store_settings -class SubResource(msrest.serialization.Model): - """Azure Synapse nested resource, which belongs to a workspace. +class BinarySource(CopySource): + """A copy activity Binary source. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BinarySource' # type: str + self.store_settings = store_settings -class DataFlowResource(SubResource): - """Data flow resource type. +class Trigger(msrest.serialization.Model): + """Azure Synapse nested object which contains information about creating pipeline run. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. 
Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} } def __init__( self, *, - properties: "DataFlow", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, **kwargs ): - super(DataFlowResource, self).__init__(**kwargs) - self.properties = properties + super(Trigger, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'Trigger' # type: str + self.description = description + self.runtime_state = None + self.annotations = annotations -class Transformation(msrest.serialization.Model): - """A data flow transformation. +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to pipeline. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. 
+ :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( self, *, - name: str, + additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): - super(Transformation, self).__init__(**kwargs) - self.name = name - self.description = description + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'MultiplePipelineTrigger' # type: str + self.pipelines = pipelines -class DataFlowSink(Transformation): - """Transformation for data flow sink. +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :type ignore_empty_blobs: bool + :param events: Required. 
The type of events that cause this trigger to fire. + :type events: list[str or ~azure.synapse.artifacts.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } def __init__( self, *, - name: str, + events: List[Union[str, "BlobEventTypes"]], + scope: str, + additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - dataset: Optional["DatasetReference"] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + blob_path_begins_with: Optional[str] = None, + blob_path_ends_with: Optional[str] = None, + ignore_empty_blobs: Optional[bool] = None, **kwargs ): - super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) - self.dataset = dataset + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'BlobEventsTrigger' # type: str + self.blob_path_begins_with = blob_path_begins_with + self.blob_path_ends_with = blob_path_ends_with + self.ignore_empty_blobs = ignore_empty_blobs + self.events = events + self.scope = scope -class DataFlowSource(Transformation): - """Transformation for data flow source. +class BlobSink(CopySink): + """A copy activity Azure Blob sink. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression + with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - name: str, - description: Optional[str] = None, - dataset: Optional["DatasetReference"] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + blob_writer_overwrite_files: Optional[object] = None, + blob_writer_date_time_format: Optional[object] = None, + blob_writer_add_header: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) - self.dataset = dataset + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BlobSink' # type: str + self.blob_writer_overwrite_files = blob_writer_overwrite_files + self.blob_writer_date_time_format = blob_writer_date_time_format + self.blob_writer_add_header = blob_writer_add_header + self.copy_behavior = copy_behavior -class DataFlowSourceSetting(msrest.serialization.Model): - """Definition of data flow source setting for debug. +class BlobSource(CopySource): + """A copy activity Azure Blob source. + + All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param source_name: The data flow source name. - :type source_name: str - :param row_limit: Defines the row limit of data flow source in debug. - :type row_limit: int + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_name': {'key': 'sourceName', 'type': 'str'}, - 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - source_name: Optional[str] = None, - row_limit: Optional[int] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + treat_empty_as_null: Optional[object] = None, + skip_header_line_count: Optional[object] = None, + recursive: Optional[object] = None, **kwargs ): - super(DataFlowSourceSetting, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.source_name = source_name - self.row_limit = row_limit + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BlobSource' # type: str + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive -class DataFlowStagingInfo(msrest.serialization.Model): - """Staging info for execute data flow activity. +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. - :param linked_service: Staging linked service reference. 
- :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for staging blob. - :type folder_path: str - """ + Variables are only populated by the server, and will be ignored when sending a request. - _attribute_map = { - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'folderPath', 'type': 'str'}, - } - - def __init__( - self, - *, - linked_service: Optional["LinkedServiceReference"] = None, - folder_path: Optional[str] = None, - **kwargs - ): - super(DataFlowStagingInfo, self).__init__(**kwargs) - self.linked_service = linked_service - self.folder_path = folder_path - - -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. - - All required parameters must be populated in order to send to Azure. + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Trigger description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. - Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. - Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should be selected to run - first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or - Expression with resultType integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression - with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, - Full and SingleBox. Type: string (or Expression with resultType string). - :type compilation_mode: object + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. 
+ :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, + 'runtime_state': {'readonly': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } def __init__( self, *, - name: str, - script_path: object, - script_linked_service: "LinkedServiceReference", + folder_path: str, + max_concurrency: int, + linked_service: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - degree_of_parallelism: Optional[object] = None, - priority: Optional[object] = None, - parameters: Optional[Dict[str, object]] = None, - runtime_version: Optional[object] = None, - compilation_mode: Optional[object] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): - super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - 
self.type = 'DataLakeAnalyticsU-SQL' # type: str - self.script_path = script_path - self.script_linked_service = script_linked_service - self.degree_of_parallelism = degree_of_parallelism - self.priority = priority - self.parameters = parameters - self.runtime_version = runtime_version - self.compilation_mode = compilation_mode - + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'BlobTrigger' # type: str + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service -class DatasetCompression(msrest.serialization.Model): - """The compression method used on a dataset. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name for connection. Type: string (or Expression with resultType + string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression with resultType + integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression with resultType + string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + host: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + port: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'DatasetCompression' # type: str + super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Cassandra' # type: str + self.host = host + self.authentication_type = authentication_type + self.port = port + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. +class CassandraSource(TabularSource): + """A copy activity source for a Cassandra database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language + (CQL) command. Type: string (or Expression with resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many Cassandra servers must + respond to a read request before returning data to the client application. Cassandra checks the + specified number of Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", + "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". + :type consistency_level: str or + ~azure.synapse.artifacts.models.CassandraSourceReadConsistencyLevels """ _validation = { @@ -6818,320 +7641,520 @@ class DatasetBZip2Compression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): - super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'BZip2' # type: str - - -class DatasetDebugResource(SubResourceDebugResource): - """Dataset debug resource. - - All required parameters must be populated in order to send to Azure. - - :param name: The resource name. - :type name: str - :param properties: Required. Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset - """ - - _validation = { - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - - def __init__( - self, - *, - properties: "Dataset", - name: Optional[str] = None, - **kwargs - ): - super(DatasetDebugResource, self).__init__(name=name, **kwargs) - self.properties = properties + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'CassandraSource' # type: str + self.query = query + self.consistency_level = consistency_level -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. 
Type of dataset compression.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name of the Cassandra database. Type: string (or Expression with + resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + resultType string). + :type keyspace: object """ _validation = { 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + keyspace: Optional[object] = None, **kwargs ): - super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'Deflate' # type: str - self.level = level - - -class DatasetFolder(msrest.serialization.Model): - """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - - :param name: The name of the folder that this Dataset is in. 
- :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } + super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CassandraTable' # type: str + self.table_name = table_name + self.keyspace = keyspace - def __init__( - self, - *, - name: Optional[str] = None, - **kwargs - ): - super(DatasetFolder, self).__init__(**kwargs) - self.name = name +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[~azure.synapse.artifacts.models.PipelineReference] + :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. 
+ :type run_dimension: str """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, } def __init__( self, *, + pipeline: "TriggerPipelineReference", + depends_on: List["PipelineReference"], + run_dimension: str, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, **kwargs ): - super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'GZip' # type: str - self.level = level + super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'ChainingTrigger' # type: str + self.pipeline = pipeline + self.depends_on = depends_on + self.run_dimension = run_dimension -class DatasetListResponse(msrest.serialization.Model): - """A list of dataset resources. +class CloudError(msrest.serialization.Model): + """The object that defines the structure of an Azure Synapse error response. All required parameters must be populated in order to send to Azure. - :param value: Required. List of datasets. - :type value: list[~azure.synapse.artifacts.models.DatasetResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + :param target: Property name/path in request associated with error. + :type target: str + :param details: Array with additional error details. + :type details: list[~azure.synapse.artifacts.models.CloudError] """ _validation = { - 'value': {'required': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DatasetResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudError]'}, } def __init__( self, *, - value: List["DatasetResource"], - next_link: Optional[str] = None, + code: str, + message: str, + target: Optional[str] = None, + details: Optional[List["CloudError"]] = None, **kwargs ): - super(DatasetListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - + super(CloudError, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details -class DatasetLocation(msrest.serialization.Model): - """Dataset location. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . 
+class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). 
- :type file_name: object + :type entity_name: object """ _validation = { 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + entity_name: Optional[object] = None, **kwargs ): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'DatasetLocation' # type: str - self.folder_path = folder_path - self.file_name = file_name + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CommonDataServiceForAppsEntity' # type: str + self.entity_name = entity_name -class DatasetReference(msrest.serialization.Model): - """Dataset reference type. +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. All required parameters must be populated in order to send to Azure. - :param type: Required. Dataset reference type. Possible values include: "DatasetReference". - :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Common Data Service for Apps + instance. 
'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common + Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType + string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for Apps server. The + property is required for on-prem and not allowed for online. Type: string (or Expression with + resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. The property is + required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression + with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property + is required for on-line and not allowed for on-prem. Type: string (or Expression with + resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service for Apps instance. + The property is required for on-prem and required for online when there are more than one + Common Data Service for Apps instances associated with the user. Type: string (or Expression + with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect to Common Data Service + for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). Possible values include: "Office365", "Ifd", + "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Common Data Service for Apps instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. 
Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'reference_name': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - type: Union[str, "DatasetReferenceType"], - reference_name: str, - parameters: Optional[Dict[str, object]] = None, + deployment_type: Union[str, "DynamicsDeploymentType"], + authentication_type: Union[str, "DynamicsAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + host_name: Optional[object] = None, + port: Optional[object] = None, + service_uri: Optional[object] = None, + organization_name: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + service_principal_id: Optional[object] = None, + service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DatasetReference, self).__init__(**kwargs) - self.type = type - self.reference_name = reference_name - self.parameters = parameters - + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CommonDataServiceForApps' # type: str + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.service_principal_id = 
service_principal_id + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential + self.encrypted_credential = encrypted_credential -class DatasetResource(SubResource): - """Dataset resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). 
+ :type alternate_key_name: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, + 'write_behavior': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } def __init__( self, *, - properties: "Dataset", + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + ignore_null_values: Optional[object] = None, + alternate_key_name: Optional[object] = None, **kwargs ): - super(DatasetResource, self).__init__(**kwargs) - self.properties = properties + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CommonDataServiceForAppsSink' # type: str + self.write_behavior = write_behavior + self.ignore_null_values = ignore_null_values + self.alternate_key_name = alternate_key_name -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data + Service for Apps (online & on-premises). Type: string (or Expression with resultType string). + :type query: object """ _validation = { @@ -7141,23 +8164,29 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type = 'ZipDeflate' # type: str - self.level = level + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CommonDataServiceForAppsSource' # type: str + self.query = query -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. +class ConcurLinkedService(LinkedService): + """Concur Service linked service. All required parameters must be populated in order to send to Azure. @@ -7174,26 +8203,23 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param authentication_type: AuthenticationType to be used for connection. Possible values - include: "Basic". - :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType - string). + :param client_id: Required. Application client_id supplied by Concur App Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur Service. :type username: object - :param password: Password for authentication. + :param password: The password corresponding to the user name that you provided in the username + field. :type password: ~azure.synapse.artifacts.models.SecretBase - :param package_collection: Under where packages are created when querying database. Type: - string (or Expression with resultType string). - :type package_collection: object - :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or - Expression with resultType string). 
- :type certificate_common_name: object + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -7202,8 +8228,8 @@ class Db2LinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, } _attribute_map = { @@ -7213,48 +8239,45 @@ class Db2LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, - 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - server: object, - database: object, + client_id: object, + username: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[Union[str, "Db2AuthenticationType"]] = None, - username: Optional[object] = None, password: Optional["SecretBase"] = None, - package_collection: Optional[object] = None, - certificate_common_name: Optional[object] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Db2' # type: str - self.server = server - self.database = database - self.authentication_type = authentication_type + super(ConcurLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Concur' # type: str + self.client_id = client_id self.username = username self.password = password - self.package_collection = package_collection - self.certificate_common_name = certificate_common_name + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential -class Db2TableDataset(Dataset): - """The Db2 table dataset. +class ConcurObjectDataset(Dataset): + """Concur Service dataset. All required parameters must be populated in order to send to Azure. @@ -7280,14 +8303,8 @@ class Db2TableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with - resultType string). - :type schema_type_properties_schema: object - :param table: The Db2 table name. Type: string (or Expression with resultType string). - :type table: object """ _validation = { @@ -7306,8 +8323,6 @@ class Db2TableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( @@ -7322,324 +8337,289 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, **kwargs ): - super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Db2Table' # type: str + super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ConcurObject' # type: str self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table -class DeleteActivity(ExecutionActivity): - """Delete activity. +class ConcurSource(TabularSource): + """A copy activity Concur Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. 
- :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path will be deleted - recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param max_concurrent_connections: The max concurrent connections to connect data source at the - same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity execution. Default - value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: object - :param log_storage_settings: Log storage settings customer need to provide when enableLogging - is true. - :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - name: str, - dataset: "DatasetReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - recursive: Optional[object] = None, - max_concurrent_connections: Optional[int] = None, - enable_logging: Optional[object] = None, - log_storage_settings: Optional["LogStorageSettings"] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'Delete' # type: str - self.recursive = recursive - self.max_concurrent_connections = max_concurrent_connections - self.enable_logging = enable_logging - self.log_storage_settings = log_storage_settings - self.dataset = dataset + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ConcurSource' # type: str + self.query = query -class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for deleting data flow debug session. +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. 
- :type data_flow_name: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] """ + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, } def __init__( self, *, - session_id: Optional[str] = None, - data_flow_name: Optional[str] = None, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, **kwargs ): - super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' # type: str -class DelimitedTextDataset(Dataset): - """Delimited text dataset. +class CopyActivity(ExecutionActivity): + """Copy activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the delimited text storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType - string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object - :param encoding_name: The code page name of the preferred encoding. If miss, the default value - is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in - the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", - "snappy", "lz4". - :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object - :param first_row_as_header: When used as input, treat the first row of data as headers. When - used as output,write the headers into the output as the first row of data. The default value is + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param outputs: List of outputs for the activity. + :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param source: Required. Copy activity source. + :type source: ~azure.synapse.artifacts.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.synapse.artifacts.models.CopySink + :param translator: Copy activity translator. If not specified, tabular translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when EnableStaging is true. + :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + avoid overloading the data store. Type: integer (or Expression with resultType integer), + minimum: 0. 
+ :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units that can be used to + perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. + Type: boolean (or Expression with resultType boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row settings when + EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 
'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + name: str, + source: "CopySource", + sink: "CopySink", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - column_delimiter: Optional[object] = None, - row_delimiter: Optional[object] = None, - encoding_name: Optional[object] = None, - compression_codec: Optional[Union[str, "DelimitedTextCompressionCodec"]] = None, - compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, - quote_char: Optional[object] = None, - escape_char: Optional[object] = None, - first_row_as_header: Optional[object] = None, - null_value: Optional[object] = None, - **kwargs + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + inputs: Optional[List["DatasetReference"]] = None, + outputs: Optional[List["DatasetReference"]] = None, + translator: Optional[object] = None, + enable_staging: Optional[object] = None, + staging_settings: Optional["StagingSettings"] = None, + parallel_copies: Optional[object] = None, + data_integration_units: Optional[object] = None, + enable_skip_incompatible_row: Optional[object] = None, + redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, + preserve_rules: Optional[List[object]] = None, + preserve: Optional[List[object]] = None, + **kwargs ): - super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DelimitedText' # type: str - self.location = location - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.encoding_name = encoding_name - self.compression_codec = compression_codec - self.compression_level = compression_level - self.quote_char = quote_char - self.escape_char = escape_char - self.first_row_as_header = first_row_as_header - self.null_value = null_value + super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'Copy' # type: str + self.inputs = inputs + self.outputs = outputs + self.source = source + self.sink = sink + self.translator = translator + self.enable_staging = enable_staging + self.staging_settings = staging_settings + self.parallel_copies = parallel_copies + self.data_integration_units = data_integration_units + self.enable_skip_incompatible_row = enable_skip_incompatible_row + self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. 
+class CopyTranslator(msrest.serialization.Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy translator type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. Document Database collection name. Type: string (or - Expression with resultType string). - :type collection_name: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - collection_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, **kwargs ): - super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DocumentDbCollection' # type: str - self.collection_name = collection_name + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'CopyTranslator' # type: str -class 
DrillLinkedService(LinkedService): - """Drill server linked service. +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. @@ -7656,11 +8636,17 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or + Expression with resultType string). + :type account_endpoint: object + :param database: The name of the database. Type: string (or Expression with resultType string). + :type database: object + :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or + AzureKeyVaultSecretReference. + :type account_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -7679,7 +8665,9 @@ class DrillLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -7692,19 +8680,23 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, + account_endpoint: Optional[object] = None, + database: Optional[object] = None, + account_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Drill' # type: str + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CosmosDb' # type: str self.connection_string = connection_string - self.pwd = pwd + self.account_endpoint = account_endpoint + self.database = database + self.account_key = account_key self.encrypted_credential = encrypted_credential -class DrillTableDataset(Dataset): - """Drill server dataset. +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. 
All required parameters must be populated in order to send to Azure. @@ -7730,19 +8722,15 @@ class DrillTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: + string (or Expression with resultType string). + :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -7755,15 +8743,14 @@ class DrillTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + collection: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -7771,20 +8758,15 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DrillTable' # type: str - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CosmosDbMongoDbApiCollection' # type: str + self.collection = collection -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. @@ -7801,36 +8783,19 @@ class DynamicsAXLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. 
The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData
- endpoint.
- :type url: object
- :param service_principal_id: Required. Specify the application's client ID. Type: string (or
- Expression with resultType string).
- :type service_principal_id: object
- :param service_principal_key: Required. Specify the application's key. Mark this field as a
- SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key
- Vault. Type: string (or Expression with resultType string).
- :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase
- :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which
- your application resides. Retrieve it by hovering the mouse in the top-right corner of the
- Azure portal. Type: string (or Expression with resultType string).
- :type tenant: object
- :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type:
- string (or Expression with resultType string).
- :type aad_resource_id: object
- :param encrypted_credential: The encrypted credential used for authentication. Credentials are
- encrypted using the integration runtime credential manager. Type: string (or Expression with
- resultType string).
- :type encrypted_credential: object
+ :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to
+ access. Type: string (or Expression with resultType string).
+ :type database: object
 """

 _validation = {
 'type': {'required': True},
- 'url': {'required': True},
- 'service_principal_id': {'required': True},
- 'service_principal_key': {'required': True},
- 'tenant': {'required': True},
- 'aad_resource_id': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
 }

 _attribute_map = {
@@ -7840,316 +8805,165 @@ class DynamicsAXLinkedService(LinkedService):
 'description': {'key': 'description', 'type': 'str'},
 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'url': {'key': 'typeProperties.url', 'type': 'object'},
- 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
- 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
- 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
- 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
 }

 def __init__(
 self,
 *,
- url: object,
- service_principal_id: object,
- service_principal_key: "SecretBase",
- tenant: object,
- aad_resource_id: object,
+ connection_string: object,
+ database: object,
 additional_properties: Optional[Dict[str, object]] = None,
 connect_via: Optional["IntegrationRuntimeReference"] = None,
 description: Optional[str] = None,
 parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
 annotations: Optional[List[object]] = None,
- encrypted_credential: Optional[object] = None,
 **kwargs
 ):
- super(DynamicsAXLinkedService,
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'DynamicsAX' # type: str - self.url = url - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CosmosDbMongoDbApi' # type: str + self.connection_string = connection_string + self.database = database -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression - with resultType string). 
- :type path: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, - } - - def __init__( - self, - *, - linked_service_name: "LinkedServiceReference", - path: object, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - **kwargs - ): - super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DynamicsAXResource' # type: str - self.path = path - - -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param write_behavior: Specifies whether the document with the same key is to be overwritten
+ (upsert) rather than throw an exception (insert). The default value is "insert". Type: string
+ (or Expression with resultType string).
+ :type write_behavior: object
 """

 _validation = {
 'type': {'required': True},
- 'linked_service_name': {'required': True},
 }

 _attribute_map = {
 'additional_properties': {'key': '', 'type': '{object}'},
 'type': {'key': 'type', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
- 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
 }

 def __init__(
 self,
 *,
- linked_service_name: "LinkedServiceReference",
 additional_properties: Optional[Dict[str, object]] = None,
- description: Optional[str] = None,
- structure: Optional[object] = None,
- schema: Optional[object] = None,
- parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
- annotations: Optional[List[object]] = None,
- folder: Optional["DatasetFolder"] = None,
- entity_name: Optional[object] = None,
+ write_batch_size: Optional[object] = None,
+ write_batch_timeout: Optional[object] = None,
+ sink_retry_count: Optional[object] = None,
+ sink_retry_wait: Optional[object] = None,
+ max_concurrent_connections: Optional[object] = None,
+ write_behavior: Optional[object] = None,
 **kwargs
 ):
- super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
- self.type = 'DynamicsCrmEntity' # type: str
- self.entity_name = entity_name
+ super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.type = 'CosmosDbMongoDbApiSink' # type: str
+ self.write_behavior = write_behavior


-class DynamicsCrmLinkedService(LinkedService):
- """Dynamics CRM linked service.
+class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Type: string (or Expression with resultType - string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem - and not allowed for online. Default is 443. Type: integer (or Expression with resultType - integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for - on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM instance. The property is - required for on-prem and required for online when there are more than one Dynamics CRM - instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics CRM instance. Type: string (or Expression + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. 
To return all documents in a
+ collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
 with resultType string).
- :type username: object
- :param password: Password to access the Dynamics CRM instance.
- :type password: ~azure.synapse.artifacts.models.SecretBase
- :param service_principal_id: The client ID of the application in Azure Active Directory used
- for Server-To-Server authentication. Type: string (or Expression with resultType string).
- :type service_principal_id: object
- :param service_principal_credential_type: The service principal credential type to use in
- Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType
- :param service_principal_credential: The credential of the service principal object in Azure
- Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
- servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
- servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only
- be AzureKeyVaultSecretReference.
- :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase
- :param encrypted_credential: The encrypted credential used for authentication. Credentials are
- encrypted using the integration runtime credential manager. Type: string (or Expression with
- resultType string).
- :type encrypted_credential: object
+ :type filter: object
+ :param cursor_methods: Cursor methods for MongoDB query.
+ :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each batch of the response
+ from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the
+ application. This property's main purpose is to avoid hitting the limitation of response size.
+ Type: integer (or Expression with resultType integer).
+ :type batch_size: object
+ :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type query_timeout: object """ _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host_name: Optional[object] = None, - port: Optional[object] = None, - service_uri: Optional[object] = None, - organization_name: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, - service_principal_credential: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, **kwargs ): - super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'DynamicsCrm' # type: str - 
self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_credential_type = service_principal_credential_type - self.service_principal_credential = service_principal_credential - self.encrypted_credential = encrypted_credential + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbMongoDbApiSource' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. +class CosmosDbSqlApiCollectionDataset(Dataset): + """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. @@ -8175,14 +8989,15 @@ class DynamicsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + Expression with resultType string). + :type collection_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, } _attribute_map = { @@ -8195,13 +9010,14 @@ class DynamicsEntityDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + collection_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -8209,148 +9025,140 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, **kwargs ): - super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'DynamicsEntity' # type: str - self.entity_name = entity_name + super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CosmosDbSqlApiCollection' # type: str + self.collection_name = collection_name -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. 
+class CosmosDbSqlApiSink(CopySink): + """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for - Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics server. The property is required - for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: str - :param port: The port of on-premises Dynamics server. The property is required for on-prem and - not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), - minimum: 0. - :type port: str - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- - line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: str - :param organization_name: The organization name of the Dynamics instance. The property is - required for on-prem and required for online when there are more than one Dynamics instances - associated with the user. Type: string (or Expression with resultType string). - :type organization_name: str - :param authentication_type: Required. The authentication type to connect to Dynamics server. - 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics instance. Type: string (or Expression with - resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). 
Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. 
+ :type write_behavior: object """ _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, - 'port': {'key': 'typeProperties.port', 'type': 'str'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host_name: Optional[str] = None, - port: Optional[str] = None, - service_uri: Optional[str] = None, - organization_name: Optional[str] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, - service_principal_credential: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[object] = None, **kwargs ): - super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Dynamics' # type: str - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = 
organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_credential_type = service_principal_credential_type - self.service_principal_credential = service_principal_credential - self.encrypted_credential = encrypted_credential + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbSqlApiSink' # type: str + self.write_behavior = write_behavior -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. +class CosmosDbSqlApiSource(CopySource): + """A copy activity Azure CosmosDB (SQL API) Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: SQL API query. Type: string (or Expression with resultType string). + :type query: object + :param page_size: Page size of the result. Type: integer (or Expression with resultType + integer). + :type page_size: object + :param preferred_regions: Preferred regions. Type: array of strings (or Expression with + resultType array of strings). 
+ :type preferred_regions: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'page_size': {'key': 'pageSize', 'type': 'object'}, + 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + page_size: Optional[object] = None, + preferred_regions: Optional[object] = None, + **kwargs + ): + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbSqlApiSource' # type: str + self.query = query + self.page_size = page_size + self.preferred_regions = preferred_regions + + +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. All required parameters must be populated in order to send to Azure. @@ -8367,23 +9175,11 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua account in the form: - sitename/username. (i.e. Eloqua/Alice). - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in connection string. + :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
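As a further sketch (again not part of the patch), the new CosmosDbSqlApiSource and CosmosDbSqlApiSink models can be paired to describe a copy activity's source and sink; the query text, page size, regions and batch settings below are illustrative assumptions, while the allowed write_behavior values come from the docstring above.

from azure.synapse.artifacts.models import CosmosDbSqlApiSink, CosmosDbSqlApiSource

source = CosmosDbSqlApiSource(
    query="select * from c where c.category = 'books'",  # hypothetical SQL API query
    page_size=1000,
    preferred_regions=["West US 2", "East US"],
)
sink = CosmosDbSqlApiSink(
    write_behavior="upsert",          # allowed values per the docstring: insert, upsert
    write_batch_size=50,
    max_concurrent_connections=4,
)
print(source.serialize())
print(sink.serialize())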
@@ -8392,8 +9188,6 @@ class EloquaLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, } _attribute_map = { @@ -8403,75 +9197,120 @@ class EloquaLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - endpoint: object, - username: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + connection_string: Optional[object] = None, + cred_string: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Eloqua' # type: str - self.endpoint = endpoint - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Couchbase' # type: str + self.connection_string = connection_string + self.cred_string = cred_string self.encrypted_credential = encrypted_credential -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. +class CouchbaseSource(TabularSource): + """A copy activity Couchbase server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. 
- :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'CouchbaseSource' # type: str + self.query = query + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, } @@ -8503,54 +9342,107 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'EloquaObject' # type: str + super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CouchbaseTable' # type: str self.table_name = table_name -class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. +class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): + """Request body structure for creating data flow debug session. + + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param existing_cluster_id: The ID of existing Databricks cluster. + :type existing_cluster_id: str + :param cluster_timeout: Timeout setting for Databricks cluster. + :type cluster_timeout: int + :param new_cluster_name: The name of new Databricks cluster. + :type new_cluster_name: str + :param new_cluster_node_type: The type of new Databricks cluster. + :type new_cluster_node_type: str + :param data_bricks_linked_service: Data bricks linked service. 
+ :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource + """ + + _attribute_map = { + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, + 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, + 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, + 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, + 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, + } + + def __init__( + self, + *, + data_flow_name: Optional[str] = None, + existing_cluster_id: Optional[str] = None, + cluster_timeout: Optional[int] = None, + new_cluster_name: Optional[str] = None, + new_cluster_node_type: Optional[str] = None, + data_bricks_linked_service: Optional["LinkedServiceResource"] = None, + **kwargs + ): + super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.data_flow_name = data_flow_name + self.existing_cluster_id = existing_cluster_id + self.cluster_timeout = cluster_timeout + self.new_cluster_name = new_cluster_name + self.new_cluster_node_type = new_cluster_node_type + self.data_bricks_linked_service = data_bricks_linked_service + + +class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): + """Response body structure for creating data flow debug session. :param session_id: The ID of data flow debug session. :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request. - :type row_limits: int - :param expression: The expression for preview. - :type expression: str """ _attribute_map = { 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, - 'expression': {'key': 'expression', 'type': 'str'}, } def __init__( self, *, session_id: Optional[str] = None, - data_flow_name: Optional[str] = None, - stream_name: Optional[str] = None, - row_limits: Optional[int] = None, - expression: Optional[str] = None, **kwargs ): - super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) + super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) self.session_id = session_id - self.data_flow_name = data_flow_name - self.stream_name = stream_name - self.row_limits = row_limits - self.expression = expression -class ExecuteDataFlowActivity(ExecutionActivity): - """Execute data flow activity. +class CreateRunResponse(msrest.serialization.Model): + """Response body with a run identifier. + + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. + :type run_id: str + """ + + _validation = { + 'run_id': {'required': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__( + self, + *, + run_id: str, + **kwargs + ): + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = run_id + + +class CustomActivity(ExecutionActivity): + """Custom activity type. All required parameters must be populated in order to send to Azure. @@ -8571,20 +9463,29 @@ class ExecuteDataFlowActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. 
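A short sketch of the debug-session request model defined above, outside the patch proper; all field values are assumptions chosen only to show the keyword-only constructor, and the optional data_bricks_linked_service argument is simply omitted here.

from azure.synapse.artifacts.models import CreateDataFlowDebugSessionRequest

request = CreateDataFlowDebugSessionRequest(
    data_flow_name="MyDataFlow",              # hypothetical data flow name
    cluster_timeout=60,                       # minutes; illustrative value
    new_cluster_name="debug-cluster",
    new_cluster_node_type="Standard_DS3_v2",  # hypothetical Databricks node type
)
print(request.serialize())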
:type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + :param command: Required. Command for custom activity Type: string (or Expression with + resultType string). + :type command: object + :param resource_linked_service: Resource linked service reference. + :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for resource files Type: string (or Expression with resultType + string). + :type folder_path: object + :param reference_objects: Reference objects. + :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject + :param extended_properties: User defined property bag. There is no restriction on the keys or + values that can be used. The user specified custom activity has the full responsibility to + consume and interpret the content defined. + :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted for custom activity. + Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'data_flow': {'required': True}, + 'command': {'required': True}, } _attribute_map = { @@ -8596,131 +9497,223 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } def __init__( self, *, name: str, - data_flow: "DataFlowReference", + command: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - staging: Optional["DataFlowStagingInfo"] = None, - integration_runtime: Optional["IntegrationRuntimeReference"] = None, 
- compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, + resource_linked_service: Optional["LinkedServiceReference"] = None, + folder_path: Optional[object] = None, + reference_objects: Optional["CustomActivityReferenceObject"] = None, + extended_properties: Optional[Dict[str, object]] = None, + retention_time_in_days: Optional[object] = None, **kwargs ): - super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'ExecuteDataFlow' # type: str - self.data_flow = data_flow - self.staging = staging - self.integration_runtime = integration_runtime - self.compute = compute + super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'Custom' # type: str + self.command = command + self.resource_linked_service = resource_linked_service + self.folder_path = folder_path + self.reference_objects = reference_objects + self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days -class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): - """Compute properties for data flow activity. +class CustomActivityReferenceObject(msrest.serialization.Model): + """Reference objects for custom activity. - :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType - :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + :param linked_services: Linked service references. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param datasets: Dataset references. + :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, } def __init__( self, *, - compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, - core_count: Optional[int] = None, + linked_services: Optional[List["LinkedServiceReference"]] = None, + datasets: Optional[List["DatasetReference"]] = None, **kwargs ): - super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) - self.compute_type = compute_type - self.core_count = core_count + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = linked_services + self.datasets = datasets -class ExecutePipelineActivity(Activity): - """Execute pipeline activity. +class CustomDataset(Dataset): + """The custom dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. 
+ :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.synapse.artifacts.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait for the dependent - pipeline execution to finish. Default is false. - :type wait_on_completion: bool + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param type_properties: Custom dataset properties. 
+ :type type_properties: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'pipeline': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__( self, *, - name: str, - pipeline: "PipelineReference", + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - parameters: Optional[Dict[str, object]] = None, - wait_on_completion: Optional[bool] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + type_properties: Optional[object] = None, **kwargs ): - super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'ExecutePipeline' # type: str - self.pipeline = pipeline - self.parameters = parameters - self.wait_on_completion = wait_on_completion + super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CustomDataset' # type: str + self.type_properties = type_properties -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param type_properties: Required. Custom linked service properties. + :type type_properties: object + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__( + self, + *, + type_properties: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CustomDataSource' # type: str + self.type_properties = type_properties + + +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: . + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {} + } + + def __init__( + self, + **kwargs + ): + super(CustomSetupBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. All required parameters must be populated in order to send to Azure. @@ -8741,42 +9734,21 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or - "x64". Type: string (or Expression with resultType string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: string (or Expression - with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. Type: string (or - Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the SSIS package. - :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers to execute the SSIS - package. - :type project_connection_managers: dict[str, object] - :param package_connection_managers: The package level connection managers to execute the SSIS - package. - :type package_connection_managers: dict[str, object] - :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks + Workspace. This path must begin with a slash. Type: string (or Expression with resultType + string). + :type notebook_path: object + :param base_parameters: Base parameters to be used for each run of this job.If the notebook + takes a parameter that is not specified, the default value from the notebook will be used. + :type base_parameters: dict[str, object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, + 'notebook_path': {'required': True}, } _attribute_map = { @@ -8788,223 +9760,35 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, + 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def 
__init__( self, *, name: str, - package_location: "SSISPackageLocation", - connect_via: "IntegrationRuntimeReference", + notebook_path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - runtime: Optional[object] = None, - logging_level: Optional[object] = None, - environment_path: Optional[object] = None, - execution_credential: Optional["SSISExecutionCredential"] = None, - project_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, - package_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, - project_connection_managers: Optional[Dict[str, object]] = None, - package_connection_managers: Optional[Dict[str, object]] = None, - property_overrides: Optional[Dict[str, "SSISPropertyOverride"]] = None, - log_location: Optional["SSISLogLocation"] = None, + base_parameters: Optional[Dict[str, object]] = None, + libraries: Optional[List[Dict[str, object]]] = None, **kwargs ): - super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'ExecuteSSISPackage' # type: str - self.package_location = package_location - self.runtime = runtime - self.logging_level = logging_level - self.environment_path = environment_path - self.execution_credential = execution_credential - self.connect_via = connect_via - self.project_parameters = project_parameters - self.package_parameters = package_parameters - self.project_connection_managers = project_connection_managers - self.package_connection_managers = package_connection_managers - self.property_overrides = property_overrides - self.log_location = log_location - + super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'DatabricksNotebook' # type: str + self.notebook_path = notebook_path + self.base_parameters = base_parameters + self.libraries = libraries -class ExposureControlRequest(msrest.serialization.Model): - """The exposure control request. - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - """ - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, - } - - def __init__( - self, - *, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, - **kwargs - ): - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = feature_name - self.feature_type = feature_type - - -class ExposureControlResponse(msrest.serialization.Model): - """The exposure control response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. 
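Finally, a usage sketch for the DatabricksNotebookActivity model whose constructor is completed above, separate from the generated code: the activity name, notebook path, parameters and the library dictionary shape are illustrative assumptions (libraries is typed as a list of free-form dicts), and serialization nests the typeProperties.* attributes under a typeProperties key as declared in the attribute map.

from azure.synapse.artifacts.models import DatabricksNotebookActivity

activity = DatabricksNotebookActivity(
    name="RunIngestNotebook",                        # hypothetical activity name
    notebook_path="/Shared/ingest",                  # must begin with a slash per the docstring
    base_parameters={"run_date": "2020-09-01"},
    libraries=[{"pypi": {"package": "simplejson"}}],  # free-form dict; shape is illustrative
)

# Flattened keys such as 'typeProperties.notebookPath' come back as a nested
# {'typeProperties': {'notebookPath': ...}} structure on serialization.
print(activity.serialize())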
- :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None - - -class Expression(msrest.serialization.Model): - """Azure Synapse expression definition. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Expression type. Possible values include: "Expression". - :type type: str or ~azure.synapse.artifacts.models.ExpressionType - :param value: Required. Expression value. - :type value: str - """ - - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - *, - type: Union[str, "ExpressionType"], - value: str, - **kwargs - ): - super(Expression, self).__init__(**kwargs) - self.type = type - self.value = value - - -class FileServerLinkedService(LinkedService): - """File system linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_id: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - **kwargs - ): - super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'FileServer' # type: str - self.host = host - self.user_id = user_id - self.password = password - self.encrypted_credential = encrypted_credential - - -class FilterActivity(Activity): - """Filter and return results from input array based on the conditions. +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. All required parameters must be populated in order to send to Azure. @@ -9021,17 +9805,24 @@ class FilterActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.synapse.artifacts.models.Expression - :param condition: Required. Condition to be used for filtering the input. - :type condition: ~azure.synapse.artifacts.models.Expression + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing the main method to be + executed. This class must be contained in a JAR provided as a library. Type: string (or + Expression with resultType string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
+ :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, + 'main_class_name': {'required': True}, } _attribute_map = { @@ -9041,30 +9832,37 @@ class FilterActivity(Activity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, *, name: str, - items: "Expression", - condition: "Expression", + main_class_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + parameters: Optional[List[object]] = None, + libraries: Optional[List[Dict[str, object]]] = None, **kwargs ): - super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Filter' # type: str - self.items = items - self.condition = condition + super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'DatabricksSparkJar' # type: str + self.main_class_name = main_class_name + self.parameters = parameters + self.libraries = libraries -class ForEachActivity(Activity): - """This activity is used for iterating over a collection and execute given activities. +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. All required parameters must be populated in order to send to Azure. @@ -9081,23 +9879,23 @@ class ForEachActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of parallel execution - (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.synapse.artifacts.models.Expression - :param activities: Required. List of activities to execute . - :type activities: list[~azure.synapse.artifacts.models.Activity] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param python_file: Required. 
The URI of the Python file to be executed. DBFS paths are + supported. Type: string (or Expression with resultType string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, + 'python_file': {'required': True}, } _attribute_map = { @@ -9107,688 +9905,7209 @@ class ForEachActivity(Activity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, *, name: str, - items: "Expression", - activities: List["Activity"], + python_file: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, - is_sequential: Optional[bool] = None, - batch_count: Optional[int] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + parameters: Optional[List[object]] = None, + libraries: Optional[List[Dict[str, object]]] = None, **kwargs ): - super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'ForEach' # type: str - self.is_sequential = is_sequential - self.batch_count = batch_count - self.items = items - self.activities = activities + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'DatabricksSparkPython' # type: str + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries -class FtpServerLinkedService(LinkedService): - """A FTP server Linked Service. +class DataFlow(msrest.serialization.Model): + """Azure Synapse nested object which contains a flow with data movements and transformations. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MappingDataFlow. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. 
Type of linked service.Constant filled by server. + :param type: Required. Type of data flow.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: The description of the data flow. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the data flow. :type annotations: list[object] - :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the FTP server uses to listen for client connections. - Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to logon the FTP server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is - true. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object - :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate - when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_server_certificate_validation: object + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. 
+ :type folder: ~azure.synapse.artifacts.models.DataFlowFolder """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + } + + _subtype_map = { + 'type': {'MappingDataFlow': 'MappingDataFlow'} } def __init__( self, *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - port: Optional[object] = None, - authentication_type: Optional[Union[str, "FtpAuthenticationType"]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - enable_ssl: Optional[object] = None, - enable_server_certificate_validation: Optional[object] = None, + folder: Optional["DataFlowFolder"] = None, **kwargs ): - super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'FtpServer' # type: str - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.enable_ssl = enable_ssl - self.enable_server_certificate_validation = enable_server_certificate_validation + super(DataFlow, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.description = description + self.annotations = annotations + self.folder = folder -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. +class DataFlowDebugCommandRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] + :param session_id: Required. The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param command_name: The command name. + :type command_name: str + :param command_payload: Required. The command payload object. + :type command_payload: object """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, + 'session_id': {'required': True}, + 'command_payload': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'command_name': {'key': 'commandName', 'type': 'str'}, + 'command_payload': {'key': 'commandPayload', 'type': 'object'}, } def __init__( self, *, - name: str, - dataset: "DatasetReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - field_list: Optional[List[object]] = None, + session_id: str, + command_payload: object, + data_flow_name: Optional[str] = None, + command_name: Optional[str] = None, **kwargs ): - super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'GetMetadata' # type: str - self.dataset = dataset - self.field_list = field_list + super(DataFlowDebugCommandRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.command_name = command_name + self.command_payload = command_payload -class GetSsisObjectMetadataRequest(msrest.serialization.Model): - """The request payload of get SSIS object metadata. +class DataFlowDebugCommandResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. 
- :param metadata_path: Metadata path. - :type metadata_path: str + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. + :type data: str """ _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, } def __init__( self, *, - metadata_path: Optional[str] = None, + status: Optional[str] = None, + data: Optional[str] = None, **kwargs ): - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = metadata_path - + super(DataFlowDebugCommandResponse, self).__init__(**kwargs) + self.status = status + self.data = data -class GoogleAdWordsLinkedService(LinkedService): - """Google AdWords service linked service. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugPackage(msrest.serialization.Model): + """Request body structure for starting data flow debug session. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_customer_id: Required. The Client customer ID of the AdWords account that you - want to fetch report data for. - :type client_customer_id: object - :param developer_token: Required. The developer token associated with the manager account that - you use to grant access to the AdWords API. - :type developer_token: ~azure.synapse.artifacts.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. 
This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'client_customer_id': {'required': True}, - 'developer_token': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource + :param datasets: List of datasets. + :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] + :param linked_services: List of linked services. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] + :param staging: Staging info for debug session. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. + :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + session_id: Optional[str] = None, + data_flow: Optional["DataFlowDebugResource"] = None, + datasets: Optional[List["DatasetDebugResource"]] = None, + linked_services: Optional[List["LinkedServiceDebugResource"]] = None, + staging: Optional["DataFlowStagingInfo"] = None, + debug_settings: Optional["DataFlowDebugPackageDebugSettings"] = None, + **kwargs + ): + super(DataFlowDebugPackage, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.session_id = session_id + self.data_flow = data_flow + self.datasets = datasets + self.linked_services = linked_services + self.staging = staging + self.debug_settings = debug_settings + + +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. + + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. 
+ :type dataset_parameters: object + """ + + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + *, + source_settings: Optional[List["DataFlowSourceSetting"]] = None, + parameters: Optional[Dict[str, object]] = None, + dataset_parameters: Optional[object] = None, + **kwargs + ): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) + self.source_settings = source_settings + self.parameters = parameters + self.dataset_parameters = dataset_parameters + + +class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): + """Request body structure for data flow preview data. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. + :type row_limits: int + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + } + + def __init__( + self, + *, + session_id: Optional[str] = None, + data_flow_name: Optional[str] = None, + stream_name: Optional[str] = None, + row_limits: Optional[int] = None, + **kwargs + ): + super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.stream_name = stream_name + self.row_limits = row_limits + + +class DataFlowDebugQueryResponse(msrest.serialization.Model): + """Response body structure of data flow query for data preview, statistics or expression preview. + + :param run_id: The run ID of data flow debug session. + :type run_id: str + """ + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__( + self, + *, + run_id: Optional[str] = None, + **kwargs + ): + super(DataFlowDebugQueryResponse, self).__init__(**kwargs) + self.run_id = run_id + + +class SubResourceDebugResource(msrest.serialization.Model): + """Azure Synapse nested debug resource. + + :param name: The resource name. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + **kwargs + ): + super(SubResourceDebugResource, self).__init__(**kwargs) + self.name = name + + +class DataFlowDebugResource(SubResourceDebugResource): + """Data flow debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Data flow properties. 
+ :type properties: ~azure.synapse.artifacts.models.DataFlow + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, + } + + def __init__( + self, + *, + properties: "DataFlow", + name: Optional[str] = None, + **kwargs + ): + super(DataFlowDebugResource, self).__init__(name=name, **kwargs) + self.properties = properties + + +class DataFlowDebugResultResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. + + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. + :type data: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[str] = None, + data: Optional[str] = None, + **kwargs + ): + super(DataFlowDebugResultResponse, self).__init__(**kwargs) + self.status = status + self.data = data + + +class DataFlowDebugSessionInfo(msrest.serialization.Model): + """Data flow debug session info. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param compute_type: Compute type of the cluster. + :type compute_type: str + :param core_count: Core count of the cluster. + :type core_count: int + :param node_count: Node count of the cluster. (deprecated property). + :type node_count: int + :param integration_runtime_name: Attached integration runtime name of data flow debug session. + :type integration_runtime_name: str + :param session_id: The ID of data flow debug session. + :type session_id: str + :param start_time: Start time of data flow debug session. + :type start_time: str + :param time_to_live_in_minutes: Compute type of the cluster. + :type time_to_live_in_minutes: int + :param last_activity_time: Last activity time of data flow debug session. 
+ :type last_activity_time: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'node_count': {'key': 'nodeCount', 'type': 'int'}, + 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, + 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + data_flow_name: Optional[str] = None, + compute_type: Optional[str] = None, + core_count: Optional[int] = None, + node_count: Optional[int] = None, + integration_runtime_name: Optional[str] = None, + session_id: Optional[str] = None, + start_time: Optional[str] = None, + time_to_live_in_minutes: Optional[int] = None, + last_activity_time: Optional[str] = None, + **kwargs + ): + super(DataFlowDebugSessionInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.data_flow_name = data_flow_name + self.compute_type = compute_type + self.core_count = core_count + self.node_count = node_count + self.integration_runtime_name = integration_runtime_name + self.session_id = session_id + self.start_time = start_time + self.time_to_live_in_minutes = time_to_live_in_minutes + self.last_activity_time = last_activity_time + + +class DataFlowDebugStatisticsRequest(msrest.serialization.Model): + """Request body structure for data flow statistics. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param columns: List of column names. + :type columns: list[str] + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'columns': {'key': 'columns', 'type': '[str]'}, + } + + def __init__( + self, + *, + session_id: Optional[str] = None, + data_flow_name: Optional[str] = None, + stream_name: Optional[str] = None, + columns: Optional[List[str]] = None, + **kwargs + ): + super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.stream_name = stream_name + self.columns = columns + + +class DataFlowFolder(msrest.serialization.Model): + """The folder that this data flow is in. If not specified, Data flow will appear at the root level. + + :param name: The name of the folder that this data flow is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + **kwargs + ): + super(DataFlowFolder, self).__init__(**kwargs) + self.name = name + + +class DataFlowListResponse(msrest.serialization.Model): + """A list of data flow resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of data flows. + :type value: list[~azure.synapse.artifacts.models.DataFlowResource] + :param next_link: The link to the next page of results, if any remaining results exist. 
+ :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataFlowResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["DataFlowResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(DataFlowListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class DataFlowReference(msrest.serialization.Model): + """Data flow reference type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". + :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType + :param reference_name: Required. Reference data flow name. + :type reference_name: str + :param dataset_parameters: Reference data flow parameters from dataset. + :type dataset_parameters: object + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Union[str, "DataFlowReferenceType"], + reference_name: str, + additional_properties: Optional[Dict[str, object]] = None, + dataset_parameters: Optional[object] = None, + **kwargs + ): + super(DataFlowReference, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.reference_name = reference_name + self.dataset_parameters = dataset_parameters + + +class DataFlowResource(AzureEntityResource): + """Data flow resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, + } + + def __init__( + self, + *, + properties: "DataFlow", + **kwargs + ): + super(DataFlowResource, self).__init__(**kwargs) + self.properties = properties + + +class Transformation(msrest.serialization.Model): + """A data flow transformation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. 
Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + description: Optional[str] = None, + **kwargs + ): + super(Transformation, self).__init__(**kwargs) + self.name = name + self.description = description + + +class DataFlowSink(Transformation): + """Transformation for data flow sink. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + *, + name: str, + description: Optional[str] = None, + dataset: Optional["DatasetReference"] = None, + **kwargs + ): + super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) + self.dataset = dataset + + +class DataFlowSource(Transformation): + """Transformation for data flow source. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + *, + name: str, + description: Optional[str] = None, + dataset: Optional["DatasetReference"] = None, + **kwargs + ): + super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) + self.dataset = dataset + + +class DataFlowSourceSetting(msrest.serialization.Model): + """Definition of data flow source setting for debug. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param source_name: The data flow source name. + :type source_name: str + :param row_limit: Defines the row limit of data flow source in debug. + :type row_limit: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_name': {'key': 'sourceName', 'type': 'str'}, + 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_name: Optional[str] = None, + row_limit: Optional[int] = None, + **kwargs + ): + super(DataFlowSourceSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.source_name = source_name + self.row_limit = row_limit + + +class DataFlowStagingInfo(msrest.serialization.Model): + """Staging info for execute data flow activity. + + :param linked_service: Staging linked service reference. 
+ :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for staging blob. + :type folder_path: str + """ + + _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + } + + def __init__( + self, + *, + linked_service: Optional["LinkedServiceReference"] = None, + folder_path: Optional[str] = None, + **kwargs + ): + super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = linked_service + self.folder_path = folder_path + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. + Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Required. Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. + Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. + :type degree_of_parallelism: object + :param priority: Determines which jobs out of all that are queued should be selected to run + first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or + Expression with resultType integer), minimum: 1. + :type priority: object + :param parameters: Parameters for U-SQL job request. + :type parameters: dict[str, object] + :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression + with resultType string). + :type runtime_version: object + :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, + Full and SingleBox. Type: string (or Expression with resultType string). 
+ :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__( + self, + *, + name: str, + script_path: object, + script_linked_service: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + degree_of_parallelism: Optional[object] = None, + priority: Optional[object] = None, + parameters: Optional[Dict[str, object]] = None, + runtime_version: Optional[object] = None, + compilation_mode: Optional[object] = None, + **kwargs + ): + super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'DataLakeAnalyticsU-SQL' # type: str + self.script_path = script_path + self.script_linked_service = script_linked_service + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.parameters = parameters + self.runtime_version = runtime_version + self.compilation_mode = compilation_mode + + +class DataLakeStorageAccountDetails(msrest.serialization.Model): + """Details of the data lake storage account associated with the workspace. + + :param account_url: Account URL. + :type account_url: str + :param filesystem: Filesystem name. + :type filesystem: str + """ + + _attribute_map = { + 'account_url': {'key': 'accountUrl', 'type': 'str'}, + 'filesystem': {'key': 'filesystem', 'type': 'str'}, + } + + def __init__( + self, + *, + account_url: Optional[str] = None, + filesystem: Optional[str] = None, + **kwargs + ): + super(DataLakeStorageAccountDetails, self).__init__(**kwargs) + self.account_url = account_url + self.filesystem = filesystem + + +class DatasetCompression(msrest.serialization.Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'DatasetCompression' # type: str + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'BZip2' # type: str + + +class DatasetDataElement(msrest.serialization.Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + *, + name: Optional[object] = None, + type: Optional[object] = None, + **kwargs + ): + super(DatasetDataElement, self).__init__(**kwargs) + self.name = name + self.type = type + + +class DatasetDebugResource(SubResourceDebugResource): + """Dataset debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + *, + properties: "Dataset", + name: Optional[str] = None, + **kwargs + ): + super(DatasetDebugResource, self).__init__(name=name, **kwargs) + self.properties = properties + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Deflate' # type: str + self.level = level + + +class DatasetFolder(msrest.serialization.Model): + """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + **kwargs + ): + super(DatasetFolder, self).__init__(**kwargs) + self.name = name + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'GZip' # type: str + self.level = level + + +class DatasetListResponse(msrest.serialization.Model): + """A list of dataset resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of datasets. + :type value: list[~azure.synapse.artifacts.models.DatasetResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DatasetResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["DatasetResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(DatasetListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class DatasetReference(msrest.serialization.Model): + """Dataset reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Dataset reference type. 
Possible values include: "DatasetReference". + :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + type: Union[str, "DatasetReferenceType"], + reference_name: str, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + self.parameters = parameters + + +class DatasetResource(AzureEntityResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + *, + properties: "Dataset", + **kwargs + ): + super(DatasetResource, self).__init__(**kwargs) + self.properties = properties + + +class DatasetSchemaDataElement(msrest.serialization.Model): + """Columns that define the physical type schema of the dataset. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Name of the schema column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the schema column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + name: Optional[object] = None, + type: Optional[object] = None, + **kwargs + ): + super(DatasetSchemaDataElement, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.type = type + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'ZipDeflate' # type: str + self.level = level + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param server: Required. Server name for connection. Type: string (or Expression with + resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic". + :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression with resultType + string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param package_collection: Under where packages are created when querying database. Type: + string (or Expression with resultType string). + :type package_collection: object + :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or + Expression with resultType string). + :type certificate_common_name: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, + 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + server: object, + database: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[Union[str, "Db2AuthenticationType"]] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + package_collection: Optional[object] = None, + certificate_common_name: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Db2' # type: str + self.server = server + self.database = database + self.authentication_type = authentication_type + self.username = username + self.password = password + self.package_collection = package_collection + self.certificate_common_name = certificate_common_name + self.encrypted_credential = encrypted_credential + + +class Db2Source(TabularSource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'Db2Source' # type: str + self.query = query + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + resultType string). + :type schema_type_properties_schema: object + :param table: The Db2 table name. Type: string (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, + **kwargs + ): + super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Db2Table' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table + + +class DeleteActivity(ExecutionActivity): + """Delete activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param recursive: If true, files or sub-folders under current folder path will be deleted + recursively. Default is false. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param max_concurrent_connections: The max concurrent connections to connect data source at the + same time. + :type max_concurrent_connections: int + :param enable_logging: Whether to record detailed logs of delete-activity execution. Default + value is false. Type: boolean (or Expression with resultType boolean). 
+ :type enable_logging: object + :param log_storage_settings: Log storage settings customer need to provide when enableLogging + is true. + :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings + :param dataset: Required. Delete activity dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'max_concurrent_connections': {'minimum': 1}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, + 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + *, + name: str, + dataset: "DatasetReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + recursive: Optional[object] = None, + max_concurrent_connections: Optional[int] = None, + enable_logging: Optional[object] = None, + log_storage_settings: Optional["LogStorageSettings"] = None, + **kwargs + ): + super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'Delete' # type: str + self.recursive = recursive + self.max_concurrent_connections = max_concurrent_connections + self.enable_logging = enable_logging + self.log_storage_settings = log_storage_settings + self.dataset = dataset + + +class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): + """Request body structure for deleting data flow debug session. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + } + + def __init__( + self, + *, + session_id: Optional[str] = None, + data_flow_name: Optional[str] = None, + **kwargs + ): + super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the delimited text storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If miss, the default value + is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in + the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", + "snappy", "lz4". + :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec + :param compression_level: The data compression method used for DelimitedText. Possible values + include: "Optimal", "Fastest". + :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param quote_char: The quote character. Type: string (or Expression with resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with resultType string). 
+ :type null_value: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + column_delimiter: Optional[object] = None, + row_delimiter: Optional[object] = None, + encoding_name: Optional[object] = None, + compression_codec: Optional[Union[str, "DelimitedTextCompressionCodec"]] = None, + compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + quote_char: Optional[object] = None, + escape_char: Optional[object] = None, + first_row_as_header: Optional[object] = None, + null_value: Optional[object] = None, + **kwargs + ): + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DelimitedText' # type: str + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + + +class FormatReadSettings(msrest.serialization.Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DelimitedTextReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'FormatReadSettings' # type: str + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from + input files. Type: integer (or Expression with resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + skip_line_count: Optional[object] = None, + **kwargs + ): + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'DelimitedTextReadSettings' # type: str + self.skip_line_count = skip_line_count + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["DelimitedTextWriteSettings"] = None, + **kwargs + ): + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DelimitedTextSink' # type: str + self.store_settings = store_settings + self.format_settings = format_settings + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, + format_settings: Optional["DelimitedTextReadSettings"] = None, + **kwargs + ): + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DelimitedTextSource' # type: str + self.store_settings = store_settings + self.format_settings = format_settings + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param quote_all_text: Indicates whether string values should always be enclosed with quotes. + Type: boolean (or Expression with resultType boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the files. Type: string (or + Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__( + self, + *, + file_extension: object, + additional_properties: Optional[Dict[str, object]] = None, + quote_all_text: Optional[object] = None, + **kwargs + ): + super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'DelimitedTextWriteSettings' # type: str + self.quote_all_text = quote_all_text + self.file_extension = file_extension + + +class DependencyReference(msrest.serialization.Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__( + self, + **kwargs + ): + super(DependencyReference, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class DistcpSettings(msrest.serialization.Model): + """Distcp settings. + + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type: + string (or Expression with resultType string). + :type resource_manager_endpoint: object + :param temp_script_path: Required. Specifies an existing folder path which will be used to + store temp Distcp command script. The script file is generated by ADF and will be removed after + Copy job finished. Type: string (or Expression with resultType string). + :type temp_script_path: object + :param distcp_options: Specifies the Distcp options. Type: string (or Expression with + resultType string). + :type distcp_options: object + """ + + _validation = { + 'resource_manager_endpoint': {'required': True}, + 'temp_script_path': {'required': True}, + } + + _attribute_map = { + 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, + 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, + 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + } + + def __init__( + self, + *, + resource_manager_endpoint: object, + temp_script_path: object, + distcp_options: Optional[object] = None, + **kwargs + ): + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = resource_manager_endpoint + self.temp_script_path = temp_script_path + self.distcp_options = distcp_options + + +class DocumentDbCollectionDataset(Dataset): + """Microsoft Azure Document Database Collection dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param collection_name: Required. Document Database collection name. Type: string (or + Expression with resultType string). 
+ :type collection_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + collection_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DocumentDbCollection' # type: str + self.collection_name = collection_name + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + nesting_separator: Optional[object] = None, + write_behavior: Optional[object] = None, + **kwargs + ): + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DocumentDbCollectionSink' # type: str + self.nesting_separator = nesting_separator + self.write_behavior = write_behavior + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Documents query. Type: string (or Expression with resultType string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or Expression with + resultType string). + :type nesting_separator: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + nesting_separator: Optional[object] = None, + query_timeout: Optional[object] = None, + **kwargs + ): + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DocumentDbCollectionSource' # type: str + self.query = query + self.nesting_separator = nesting_separator + self.query_timeout = query_timeout + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + pwd: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Drill' # type: str + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + + +class DrillSource(TabularSource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'DrillSource' # type: str + self.query = query + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + **kwargs + ): + super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DrillTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema + + +class DWCopyCommandDefaultValue(msrest.serialization.Model): + """Default value. + + :param column_name: Column name. Type: object (or Expression with resultType string). + :type column_name: object + :param default_value: The default value of the column. Type: object (or Expression with + resultType string). + :type default_value: object + """ + + _attribute_map = { + 'column_name': {'key': 'columnName', 'type': 'object'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__( + self, + *, + column_name: Optional[object] = None, + default_value: Optional[object] = None, + **kwargs + ): + super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + self.column_name = column_name + self.default_value = default_value + + +class DWCopyCommandSettings(msrest.serialization.Model): + """DW Copy Command settings. + + :param default_values: Specifies the default values for each target column in SQL DW. The + default values in the property overwrite the DEFAULT constraint set in the DB, and identity + column cannot have a default value. Type: array of objects (or Expression with resultType array + of objects). + :type default_values: list[~azure.synapse.artifacts.models.DWCopyCommandDefaultValue] + :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: + key value pairs (value should be string type) (or Expression with resultType object). Example: + "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. 
+ :type additional_options: dict[str, str] + """ + + _attribute_map = { + 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, + } + + def __init__( + self, + *, + default_values: Optional[List["DWCopyCommandDefaultValue"]] = None, + additional_options: Optional[Dict[str, str]] = None, + **kwargs + ): + super(DWCopyCommandSettings, self).__init__(**kwargs) + self.default_values = default_values + self.additional_options = additional_options + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData + endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. Mark this field as a + SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key + Vault. Type: string (or Expression with resultType string). + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which + your application resides. Retrieve it by hovering the mouse in the top-right corner of the + Azure portal. Type: string (or Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: + string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + url: object, + service_principal_id: object, + service_principal_key: "SecretBase", + tenant: object, + aad_resource_id: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'DynamicsAX' # type: str + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + with resultType string). + :type path: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + path: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DynamicsAXResource' # type: str + self.path = path + + +class DynamicsAXSource(TabularSource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'DynamicsAXSource' # type: str + self.query = query + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). 
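For orientation, a minimal usage sketch of the DynamicsAX models defined above, showing how the linked service, dataset and copy source fit together. This is illustrative only and not part of the generated module; the SecureString and LinkedServiceReference constructors, the URLs, and the reference name "AxLinkedService" are assumptions rather than values taken from this patch.

    from azure.synapse.artifacts.models import (
        DynamicsAXLinkedService,
        DynamicsAXResourceDataset,
        DynamicsAXSource,
        LinkedServiceReference,   # assumed shape: (type=..., reference_name=...) as elsewhere in this models module
        SecureString,             # assumed shape: SecureString(value=...), one of the SecretBase implementations
    )

    # Linked service: all five typeProperties marked Required above must be supplied.
    ax_linked_service = DynamicsAXLinkedService(
        url="https://contoso.operations.dynamics.com/data",           # hypothetical OData endpoint
        service_principal_id="<client-id>",
        service_principal_key=SecureString(value="<client-secret>"),
        tenant="contoso.onmicrosoft.com",
        aad_resource_id="https://contoso.operations.dynamics.com",
    )

    # Dataset: points at one OData entity through a reference to a published linked service.
    ax_dataset = DynamicsAXResourceDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="AxLinkedService"
        ),
        path="Customers",
    )

    # Copy source: every field is optional; 'query' and 'query_timeout' narrow what is read.
    ax_source = DynamicsAXSource(query="$top=1000", query_timeout="02:00:00")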
+ :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + entity_name: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DynamicsCrmEntity' # type: str + self.entity_name = entity_name + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' + for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: + string (or Expression with resultType string). Possible values include: "Online", + "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Type: string (or Expression with resultType + string). + :type host_name: object + :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem + and not allowed for online. Default is 443. Type: integer (or Expression with resultType + integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). 
+ :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM instance. The property is + required for on-prem and required for online when there are more than one Dynamics CRM + instances associated with the user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect to Dynamics CRM + server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). Possible values include: "Office365", "Ifd", + "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Dynamics CRM instance. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + deployment_type: Union[str, "DynamicsDeploymentType"], + authentication_type: Union[str, "DynamicsAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + host_name: Optional[object] = None, + port: Optional[object] = None, + service_uri: Optional[object] = None, + organization_name: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + service_principal_id: Optional[object] = None, + service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'DynamicsCrm' # type: str + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential + self.encrypted_credential = encrypted_credential + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
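A comparable sketch for the DynamicsCrmLinkedService completed just above. deployment_type and authentication_type accept the plain enum strings listed in its docstring; SecureString is again assumed from the same models module, and the URL, user name and password below are placeholders rather than values from this patch.

    from azure.synapse.artifacts.models import DynamicsCrmLinkedService, SecureString

    crm_linked_service = DynamicsCrmLinkedService(
        deployment_type="Online",           # required; "Online" or "OnPremisesWithIfd"
        authentication_type="Office365",    # required; "Office365", "Ifd" or "AADServicePrincipal"
        service_uri="https://contoso.crm.dynamics.com",
        username="integration@contoso.com",
        password=SecureString(value="<password>"),
    )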
+ :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + *, + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + ignore_null_values: Optional[object] = None, + alternate_key_name: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DynamicsCrmSink' # type: str + self.write_behavior = write_behavior + self.ignore_null_values = ignore_null_values + self.alternate_key_name = alternate_key_name + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DynamicsCrmSource' # type: str + self.query = query + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
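The Dynamics CRM sink and source defined above can be sketched without any types from outside this diff; write_behavior is required and "Upsert" is the only value the docstring lists, while the FetchXML query is a made-up example.

    from azure.synapse.artifacts.models import DynamicsCrmSink, DynamicsCrmSource

    crm_sink = DynamicsCrmSink(
        write_behavior="Upsert",
        ignore_null_values=True,             # skip nulls (except key fields) when writing
        alternate_key_name="accountnumber",  # upsert against an alternate key
    )

    crm_source = DynamicsCrmSource(
        query="<fetch><entity name='account'><attribute name='name'/></entity></fetch>"
    )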
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). + :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + entity_name: Optional[object] = None, + **kwargs + ): + super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DynamicsEntity' # type: str + self.entity_name = entity_name + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or + Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics server. The property is required + for on-prem and not allowed for online. Type: string (or Expression with resultType string). + :type host_name: str + :param port: The port of on-premises Dynamics server. The property is required for on-prem and + not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), + minimum: 0. + :type port: str + :param service_uri: The URL to the Microsoft Dynamics server. 
The property is required for on- + line and not allowed for on-prem. Type: string (or Expression with resultType string). + :type service_uri: str + :param organization_name: The organization name of the Dynamics instance. The property is + required for on-prem and required for online when there are more than one Dynamics instances + associated with the user. Type: string (or Expression with resultType string). + :type organization_name: str + :param authentication_type: Required. The authentication type to connect to Dynamics server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Dynamics instance. Type: string (or Expression with + resultType string). + :type username: object + :param password: Password to access the Dynamics instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, + 'port': {'key': 'typeProperties.port', 'type': 'str'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + deployment_type: Union[str, "DynamicsDeploymentType"], + authentication_type: Union[str, "DynamicsAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + host_name: Optional[str] = None, + port: Optional[str] = None, + service_uri: Optional[str] = None, + organization_name: Optional[str] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + service_principal_id: Optional[object] = None, + service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Dynamics' # type: str + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential + self.encrypted_credential = encrypted_credential + + +class DynamicsSink(CopySink): + """A copy activity Dynamics sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + *, + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + ignore_null_values: Optional[object] = None, + alternate_key_name: Optional[object] = None, + **kwargs + ): + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DynamicsSink' # type: str + self.write_behavior = write_behavior + self.ignore_null_values = ignore_null_values + self.alternate_key_name = alternate_key_name + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DynamicsSource' # type: str + self.query = query + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). + :type endpoint: object + :param username: Required. The site name and user name of your Eloqua account in the form: + sitename/username. (i.e. Eloqua/Alice). + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. 
+ :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + endpoint: object, + username: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Eloqua' # type: str + self.endpoint = endpoint + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
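A short sketch for the Eloqua linked service completed above. The endpoint and username literals mirror the examples quoted in its docstring; SecureString is assumed from the same models module and the password is a placeholder.

    from azure.synapse.artifacts.models import EloquaLinkedService, SecureString

    eloqua_linked_service = EloquaLinkedService(
        endpoint="eloqua.example.com",       # required
        username="Eloqua/Alice",             # required, in the form sitename/username
        password=SecureString(value="<password>"),
        use_encrypted_endpoints=True,        # the service defaults this to true
    )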
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'EloquaObject' # type: str + self.table_name = table_name + + +class EloquaSource(TabularSource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'EloquaSource' # type: str + self.query = query + + +class EntityReference(msrest.serialization.Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "IntegrationRuntimeEntityReferenceType"]] = None, + reference_name: Optional[str] = None, + **kwargs + ): + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: object + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorContract(msrest.serialization.Model): + """Contains details when the response code indicates an error. + + :param error: The error details. + :type error: ~azure.synapse.artifacts.models.ErrorResponse + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + *, + error: Optional["ErrorResponse"] = None, + **kwargs + ): + super(ErrorContract, self).__init__(**kwargs) + self.error = error + + +class ErrorResponse(msrest.serialization.Model): + """The resource management error response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. 
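EntityReference and the error models above differ in how they are populated: EntityReference is built client-side, while ErrorResponse's fields are readonly and filled in by the server. A hedged sketch, assuming the from_dict helper these msrest models inherit, with a made-up error payload:

    from azure.synapse.artifacts.models import EntityReference, ErrorContract

    ref = EntityReference(type="LinkedServiceReference", reference_name="MyLinkedService")

    # Readonly fields are set during deserialization rather than through __init__.
    error = ErrorContract.from_dict(
        {"error": {"code": "BadRequest", "message": "The pipeline definition is invalid."}}
    )
    print(error.error.code, error.error.message)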
+ :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.synapse.artifacts.models.ErrorResponse] + :ivar additional_info: The error additional info. + :vartype additional_info: list[~azure.synapse.artifacts.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorResponse]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. + :type row_limits: int + :param expression: The expression for preview. + :type expression: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__( + self, + *, + session_id: Optional[str] = None, + data_flow_name: Optional[str] = None, + stream_name: Optional[str] = None, + row_limits: Optional[int] = None, + expression: Optional[str] = None, + **kwargs + ): + super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.stream_name = stream_name + self.row_limits = row_limits + self.expression = expression + + +class ExecuteDataFlowActivity(ExecutionActivity): + """Execute data flow activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param data_flow: Required. Data flow reference. 
+ :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference + :param staging: Staging info for execute data flow activity. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param integration_runtime: The integration runtime reference. + :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param compute: Compute properties for data flow activity. + :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'data_flow': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, + 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, + 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + } + + def __init__( + self, + *, + name: str, + data_flow: "DataFlowReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + staging: Optional["DataFlowStagingInfo"] = None, + integration_runtime: Optional["IntegrationRuntimeReference"] = None, + compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, + **kwargs + ): + super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'ExecuteDataFlow' # type: str + self.data_flow = data_flow + self.staging = staging + self.integration_runtime = integration_runtime + self.compute = compute + + +class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): + """Compute properties for data flow activity. + + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. 
+ :type core_count: int + """ + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + } + + def __init__( + self, + *, + compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, + core_count: Optional[int] = None, + **kwargs + ): + super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) + self.compute_type = compute_type + self.core_count = core_count + + +class ExecutePipelineActivity(Activity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.synapse.artifacts.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait for the dependent + pipeline execution to finish. Default is false. + :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__( + self, + *, + name: str, + pipeline: "PipelineReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + parameters: Optional[Dict[str, object]] = None, + wait_on_completion: Optional[bool] = None, + **kwargs + ): + super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'ExecutePipeline' # type: str + self.pipeline = pipeline + self.parameters = parameters + self.wait_on_completion = wait_on_completion + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. 
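A sketch of the ExecuteDataFlowActivity compute settings and the ExecutePipelineActivity defined above. DataFlowReference and PipelineReference are assumed to keep their usual (type, reference_name) constructors from this models module; they are referenced in the docstrings here but not defined in this diff, and all names and values are placeholders.

    from azure.synapse.artifacts.models import (
        DataFlowReference,       # assumed constructor
        ExecuteDataFlowActivity,
        ExecuteDataFlowActivityTypePropertiesCompute,
        ExecutePipelineActivity,
        PipelineReference,       # assumed constructor
    )

    # Size the cluster that runs the data flow; the supported core counts are listed above.
    compute = ExecuteDataFlowActivityTypePropertiesCompute(
        compute_type="MemoryOptimized", core_count=16
    )

    run_flow = ExecuteDataFlowActivity(
        name="RunMappingDataFlow",
        data_flow=DataFlowReference(type="DataFlowReference", reference_name="MyDataFlow"),
        compute=compute,
    )

    # Run a child pipeline and wait for it to finish before continuing.
    run_child = ExecutePipelineActivity(
        name="RunChildPipeline",
        pipeline=PipelineReference(type="PipelineReference", reference_name="ChildPipeline"),
        parameters={"runDate": "2020-09-01"},
        wait_on_completion=True,
    )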
+ :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + "x64". Type: string (or Expression with resultType string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: string (or Expression + with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. Type: string (or + Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the SSIS package. + :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the SSIS package. + :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers to execute the SSIS + package. + :type project_connection_managers: dict[str, object] + :param package_connection_managers: The package level connection managers to execute the SSIS + package. + :type package_connection_managers: dict[str, object] + :param property_overrides: The property overrides to execute the SSIS package. + :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. 
+ :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__( + self, + *, + name: str, + package_location: "SSISPackageLocation", + connect_via: "IntegrationRuntimeReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + runtime: Optional[object] = None, + logging_level: Optional[object] = None, + environment_path: Optional[object] = None, + execution_credential: Optional["SSISExecutionCredential"] = None, + project_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, + package_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, + project_connection_managers: Optional[Dict[str, object]] = None, + package_connection_managers: Optional[Dict[str, object]] = None, + property_overrides: Optional[Dict[str, "SSISPropertyOverride"]] = None, + log_location: Optional["SSISLogLocation"] = None, + **kwargs + ): + super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'ExecuteSSISPackage' # type: str + self.package_location = package_location + self.runtime = runtime + self.logging_level = logging_level + self.environment_path = environment_path + self.execution_credential = execution_credential + self.connect_via = connect_via + self.project_parameters = project_parameters + 
self.package_parameters = package_parameters + self.project_connection_managers = project_connection_managers + self.package_connection_managers = package_connection_managers + self.property_overrides = property_overrides + self.log_location = log_location + + +class ExposureControlRequest(msrest.serialization.Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__( + self, + *, + feature_name: Optional[str] = None, + feature_type: Optional[str] = None, + **kwargs + ): + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = feature_name + self.feature_type = feature_type + + +class ExposureControlResponse(msrest.serialization.Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None + + +class Expression(msrest.serialization.Model): + """Azure Synapse expression definition. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Expression type. Possible values include: "Expression". + :type type: str or ~azure.synapse.artifacts.models.ExpressionType + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "ExpressionType"], + value: str, + **kwargs + ): + super(Expression, self).__init__(**kwargs) + self.type = type + self.value = value + + +class FileServerLinkedService(LinkedService): + """File system linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + user_id: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'FileServer' # type: str + self.host = host + self.user_id = user_id + self.password = password + self.encrypted_credential = encrypted_credential + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(FileServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'FileServerLocation' # type: str + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'FileServerReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. 
+ :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + **kwargs + ): + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'FileServerWriteSettings' # type: str + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + **kwargs + ): + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'FileSystemSink' # type: str + self.copy_behavior = copy_behavior + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + **kwargs + ): + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'FileSystemSource' # type: str + self.recursive = recursive + + +class FilterActivity(Activity): + """Filter and return results from input array based on the conditions. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param items: Required. Input array on which filter should be applied. + :type items: ~azure.synapse.artifacts.models.Expression + :param condition: Required. Condition to be used for filtering the input. 
+ :type condition: ~azure.synapse.artifacts.models.Expression + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'items': {'required': True}, + 'condition': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, + 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + } + + def __init__( + self, + *, + name: str, + items: "Expression", + condition: "Expression", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + **kwargs + ): + super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Filter' # type: str + self.items = items + self.condition = condition + + +class ForEachActivity(Activity): + """This activity is used for iterating over a collection and execute given activities. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). + :type is_sequential: bool + :param batch_count: Batch count to be used for controlling the number of parallel execution + (when isSequential is set to false). + :type batch_count: int + :param items: Required. Collection to iterate. + :type items: ~azure.synapse.artifacts.models.Expression + :param activities: Required. List of activities to execute . 
+ :type activities: list[~azure.synapse.artifacts.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'batch_count': {'maximum': 50}, + 'items': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, + 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, + 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__( + self, + *, + name: str, + items: "Expression", + activities: List["Activity"], + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + is_sequential: Optional[bool] = None, + batch_count: Optional[int] = None, + **kwargs + ): + super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'ForEach' # type: str + self.is_sequential = is_sequential + self.batch_count = batch_count + self.items = items + self.activities = activities + + +class FtpReadSettings(StoreReadSettings): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. 
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + use_binary_transfer: Optional[bool] = None, + **kwargs + ): + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'FtpReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.use_binary_transfer = use_binary_transfer + + +class FtpServerLinkedService(LinkedService): + """A FTP server Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType + string). + :type host: object + :param port: The TCP port number that the FTP server uses to listen for client connections. + Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param authentication_type: The authentication type to be used to connect to the FTP server. + Possible values include: "Basic", "Anonymous". + :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType + :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to logon the FTP server. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is + true. Type: boolean (or Expression with resultType boolean). 
+ :type enable_ssl: object + :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate + when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with + resultType boolean). + :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + authentication_type: Optional[Union[str, "FtpAuthenticationType"]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + enable_ssl: Optional[object] = None, + enable_server_certificate_validation: Optional[object] = None, + **kwargs + ): + super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'FtpServer' # type: str + self.host = host + self.port = port + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.enable_ssl = enable_ssl + self.enable_server_certificate_validation = enable_server_certificate_validation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'FtpServerLocation' # type: str + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. 
+ :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__( + self, + *, + name: str, + dataset: "DatasetReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + field_list: Optional[List[object]] = None, + **kwargs + ): + super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'GetMetadata' # type: str + self.dataset = dataset + self.field_list = field_list + + +class GetSsisObjectMetadataRequest(msrest.serialization.Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__( + self, + *, + metadata_path: Optional[str] = None, + **kwargs + ): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = metadata_path + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_customer_id: Required. The Client customer ID of the AdWords account that you + want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the manager account that + you use to grant access to the AdWords API. + :type developer_token: ~azure.synapse.artifacts.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. 
Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords + for UserAuthentication. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + client_customer_id: object, + developer_token: "SecretBase", + authentication_type: Union[str, "GoogleAdWordsAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + refresh_token: Optional["SecretBase"] = None, + client_id: Optional[object] = None, + client_secret: Optional["SecretBase"] = None, + email: Optional[object] = None, + key_file_path: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'GoogleAdWords' # type: str + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'GoogleAdWordsObject' # type: str + self.table_name = table_name + + +class GoogleAdWordsSource(TabularSource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'GoogleAdWordsSource' # type: str + self.query = query + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + Drive access enables support for federated tables that combine BigQuery data with data from + Google Drive. The default value is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery + for UserAuthentication. 
+ :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + project: object, + authentication_type: Union[str, "GoogleBigQueryAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + additional_projects: Optional[object] = None, + request_google_drive_scope: Optional[object] = None, + refresh_token: 
Optional["SecretBase"] = None, + client_id: Optional[object] = None, + client_secret: Optional["SecretBase"] = None, + email: Optional[object] = None, + key_file_path: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'GoogleBigQuery' # type: str + self.project = project + self.additional_projects = additional_projects + self.request_google_drive_scope = request_google_drive_scope + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using database + table + properties instead. + :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or Expression with + resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or Expression with + resultType string). 
+ :type dataset: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + dataset: Optional[object] = None, + **kwargs + ): + super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'GoogleBigQueryObject' # type: str + self.table_name = table_name + self.table = table + self.dataset = dataset + + +class GoogleBigQuerySource(TabularSource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
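# --- Illustrative usage sketch (not part of the generated diff) ---------------
# A minimal, hypothetical example of constructing the Google BigQuery models
# defined above. All values are placeholders; `refresh_token_secret` and
# `bigquery_ls_ref` are assumed to be a models.SecretBase instance and a
# models.LinkedServiceReference built elsewhere.
from azure.synapse.artifacts import models

bigquery_ls = models.GoogleBigQueryLinkedService(
    project="my-gcp-project",                  # hypothetical project id
    authentication_type="UserAuthentication",  # GoogleBigQueryAuthenticationType value
    refresh_token=refresh_token_secret,        # assumed: any models.SecretBase instance
)

bigquery_ds = models.GoogleBigQueryObjectDataset(
    linked_service_name=bigquery_ls_ref,       # assumed: a models.LinkedServiceReference
    dataset="my_dataset",                      # BigQuery database name
    table="my_table",                          # BigQuery table name
)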
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'GoogleBigQuerySource' # type: str + self.query = query + + +class GoogleCloudStorageLinkedService(LinkedService): + """Linked service for Google Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access + Management (IAM) user. + :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Google Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_key_id: Optional[object] = None, + secret_access_key: Optional["SecretBase"] = None, + service_url: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'GoogleCloudStorage' # type: str + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + + +class GoogleCloudStorageLocation(DatasetLocation): + """The location of Google Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with + resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + bucket_name: Optional[object] = None, + version: Optional[object] = None, + **kwargs + ): + super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'GoogleCloudStorageLocation' # type: str + self.bucket_name = bucket_name + self.version = version + + +class GoogleCloudStorageReadSettings(StoreReadSettings): + """Google Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'GoogleCloudStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
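# --- Illustrative usage sketch (not part of the generated diff) ---------------
# A hypothetical example of the Google Cloud Storage models defined above; the
# access key id and the `gcs_secret` SecretBase instance are placeholders.
from azure.synapse.artifacts import models

gcs_ls = models.GoogleCloudStorageLinkedService(
    access_key_id="GOOG1EXAMPLEKEYID",  # hypothetical IAM access key id
    secret_access_key=gcs_secret,       # assumed: any models.SecretBase instance
)

gcs_read = models.GoogleCloudStorageReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    prefix="raw/2020/",                 # object-name prefix filter
)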
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + pwd: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Greenplum' # type: str + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + + +class GreenplumSource(TabularSource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'GreenplumSource' # type: str + self.query = query + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression + with resultType string). 
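# --- Illustrative usage sketch (not part of the generated diff) ---------------
# Hypothetical Greenplum configuration built from the models above; the
# connection string and the `greenplum_pwd_ref` Key Vault reference are
# placeholders only.
from azure.synapse.artifacts import models

greenplum_ls = models.GreenplumLinkedService(
    connection_string="HOST=gp.example.com;PORT=5432;DB=analytics;UID=loader",
    pwd=greenplum_pwd_ref,    # assumed: a models.AzureKeyVaultSecretReference
)

greenplum_source = models.GreenplumSource(
    query="SELECT * FROM sales.orders WHERE order_date >= '2020-01-01'",
    query_timeout="02:00:00",  # timespan pattern described in the docstring
)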
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + **kwargs + ): + super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'GreenplumTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). + :type host: object + :param port: The TCP port that the HBase instance uses to listen for client connections. The + default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version). + :type http_path: object + :param authentication_type: Required. The authentication mechanism to use to connect to the + HBase server. Possible values include: "Anonymous", "Basic". + :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. 
+ :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + authentication_type: Union[str, "HBaseAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + http_path: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(HBaseLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HBase' # type: str + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'HBaseObject' # type: str + self.table_name = table_name + + +class HBaseSource(TabularSource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
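# --- Illustrative usage sketch (not part of the generated diff) ---------------
# Hypothetical HBase configuration; the host, port, `hbase_password` secret and
# `hbase_ls_ref` reference are placeholders, and "Basic" is one of the
# documented HBaseAuthenticationType values.
from azure.synapse.artifacts import models

hbase_ls = models.HBaseLinkedService(
    host="192.168.222.160",            # example host from the docstring above
    authentication_type="Basic",
    port=9090,
    username="hbase_reader",
    password=hbase_password,           # assumed: any models.SecretBase instance
    enable_ssl=True,
)

hbase_ds = models.HBaseObjectDataset(
    linked_service_name=hbase_ls_ref,  # assumed: a models.LinkedServiceReference
    table_name="events",
)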
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'HBaseSource' # type: str + self.query = query + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the HDFS. Possible values + are: Anonymous and Windows. Type: string (or Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__( + self, + *, + url: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + encrypted_credential: Optional[object] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + **kwargs + ): + super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Hdfs' # type: str + self.url = url + self.authentication_type = authentication_type + self.encrypted_credential = encrypted_credential + self.user_name = user_name + self.password = password + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(HdfsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'HdfsLocation' # type: str + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + distcp_settings: Optional["DistcpSettings"] = None, + **kwargs + ): + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HdfsReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + distcp_settings: Optional["DistcpSettings"] = None, + **kwargs + ): + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HdfsSource' # type: str + self.recursive = recursive + self.distcp_settings = distcp_settings + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". 
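# --- Illustrative usage sketch (not part of the generated diff) ---------------
# Hypothetical HDFS read configuration using the models above; the WebHDFS URL
# and the `hdfs_password` secret are placeholders.
from azure.synapse.artifacts import models

hdfs_ls = models.HdfsLinkedService(
    url="http://myhostname:50070/webhdfs/v1",  # endpoint form shown in the docstring
    authentication_type="Windows",
    user_name="svc_ingest",
    password=hdfs_password,                    # assumed: any models.SecretBase instance
)

hdfs_read = models.HdfsReadSettings(
    recursive=True,
    wildcard_file_name="*.parquet",
)

hdfs_source = models.HdfsSource(recursive=True)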
+ :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster + is with ESP (Enterprise Security Package). + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__( + self, + *, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + script_path: Optional[object] = None, + script_linked_service: Optional["LinkedServiceReference"] = None, + defines: Optional[Dict[str, object]] = None, + variables: Optional[List[object]] = None, + query_timeout: Optional[int] = None, + **kwargs + ): + super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightHive' # type: str + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.variables = variables + self.query_timeout = query_timeout + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with + resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to + the HCatalog database. + :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. + Type: string (or Expression with resultType string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__( + self, + *, + cluster_uri: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, + encrypted_credential: Optional[object] = None, + is_esp_enabled: Optional[object] = None, + file_system: Optional[object] = None, + **kwargs + ): + super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HDInsight' # type: str + self.cluster_uri = cluster_uri + self.user_name = user_name + self.password = password + self.linked_service_name = linked_service_name + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.encrypted_credential = encrypted_credential + self.is_esp_enabled = is_esp_enabled + self.file_system = file_system + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
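# --- Illustrative usage sketch (not part of the generated diff) ---------------
# Hypothetical HDInsight linked service and Hive activity wired together from
# the models above; the cluster URI, script path, `hdi_password` secret and
# `hdi_ls_ref` reference are placeholders.
from azure.synapse.artifacts import models

hdi_ls = models.HDInsightLinkedService(
    cluster_uri="https://mycluster.azurehdinsight.net",
    user_name="admin",
    password=hdi_password,              # assumed: any models.SecretBase instance
    is_esp_enabled=False,
)

hive_activity = models.HDInsightHiveActivity(
    name="RunDailyHiveScript",
    linked_service_name=hdi_ls_ref,     # assumed: a models.LinkedServiceReference
    script_path="scripts/daily_agg.hql",
    defines={"run_date": "2020-09-01"},
    get_debug_info="Failure",           # documented HDInsightActivityDebugInfoOption value
)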
+ :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } def __init__( self, *, - client_customer_id: object, - developer_token: "SecretBase", - authentication_type: Union[str, "GoogleAdWordsAuthenticationType"], + name: str, + class_name: object, + jar_file_path: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - 
annotations: Optional[List[object]] = None, - refresh_token: Optional["SecretBase"] = None, - client_id: Optional[object] = None, - client_secret: Optional["SecretBase"] = None, - email: Optional[object] = None, - key_file_path: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - encrypted_credential: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + jar_linked_service: Optional["LinkedServiceReference"] = None, + jar_libs: Optional[List[object]] = None, + defines: Optional[Dict[str, object]] = None, **kwargs ): - super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'GoogleAdWords' # type: str - self.client_customer_id = client_customer_id - self.developer_token = developer_token - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential + super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightMapReduce' # type: str + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.class_name = class_name + self.jar_file_path = jar_file_path + self.jar_linked_service = jar_linked_service + self.jar_libs = jar_libs + self.defines = defines -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. +class HDInsightOnDemandLinkedService(LinkedService): + """HDInsight ondemand linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. + Type: string (or Expression with resultType string). + :type cluster_size: object + :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity + run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string + (or Expression with resultType string). + :type time_to_live: object + :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + resultType string). + :type version: object + :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand + cluster for storing and processing data. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: + string (or Expression with resultType string). + :type host_subscription_id: object + :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string + (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key for the service principal id. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: + string (or Expression with resultType string). + :type cluster_resource_group: object + :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with + timestamp. Type: string (or Expression with resultType string). + :type cluster_name_prefix: object + :param cluster_user_name: The username to access the cluster. Type: string (or Expression with + resultType string). + :type cluster_user_name: object + :param cluster_password: The password to access the cluster. + :type cluster_password: ~azure.synapse.artifacts.models.SecretBase + :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + Linux). Type: string (or Expression with resultType string). + :type cluster_ssh_user_name: object + :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). + :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase + :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight + linked service so that the Data Factory service can register them on your behalf. 
+ :type additional_linked_service_names: + list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the + HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database + as the metastore. + :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param cluster_type: The cluster type. Type: string (or Expression with resultType string). + :type cluster_type: object + :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or + Expression with resultType string). + :type spark_version: object + :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) + for the HDInsight cluster to be created. + :type core_configuration: object + :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for + the HDInsight cluster. + :type h_base_configuration: object + :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the + HDInsight cluster. + :type hdfs_configuration: object + :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the + HDInsight cluster. + :type hive_configuration: object + :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- + site.xml) for the HDInsight cluster. + :type map_reduce_configuration: object + :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for + the HDInsight cluster. + :type oozie_configuration: object + :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for + the HDInsight cluster. + :type storm_configuration: object + :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the + HDInsight cluster. + :type yarn_configuration: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param head_node_size: Specifies the size of the head node for the HDInsight cluster. + :type head_node_size: object + :param data_node_size: Specifies the size of the data node for the HDInsight cluster. + :type data_node_size: object + :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight + cluster. + :type zookeeper_node_size: object + :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. + Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- + cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- + us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be + joined after creation. Type: string (or Expression with resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + specified, then this property is required. Type: string (or Expression with resultType string). 
+ :type subnet_name: object """ _validation = { 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 
'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, } def __init__( self, *, + cluster_size: object, + time_to_live: object, + version: object, linked_service_name: "LinkedServiceReference", + host_subscription_id: object, + tenant: object, + cluster_resource_group: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + cluster_name_prefix: Optional[object] = None, + cluster_user_name: Optional[object] = None, + cluster_password: Optional["SecretBase"] = None, + cluster_ssh_user_name: Optional[object] = None, + cluster_ssh_password: Optional["SecretBase"] = None, + additional_linked_service_names: Optional[List["LinkedServiceReference"]] = None, + hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, + cluster_type: Optional[object] = None, + spark_version: Optional[object] = None, + core_configuration: Optional[object] = None, + h_base_configuration: Optional[object] = None, + hdfs_configuration: Optional[object] = None, + hive_configuration: Optional[object] = None, + map_reduce_configuration: Optional[object] = None, + oozie_configuration: Optional[object] = None, + storm_configuration: Optional[object] = None, + yarn_configuration: Optional[object] = None, + encrypted_credential: Optional[object] = None, + head_node_size: Optional[object] = None, + data_node_size: Optional[object] = None, + zookeeper_node_size: Optional[object] = None, + script_actions: Optional[List["ScriptAction"]] = None, + virtual_network_id: Optional[object] = None, + subnet_name: Optional[object] = None, **kwargs ): - super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'GoogleAdWordsObject' # type: str - self.table_name = table_name + super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HDInsightOnDemand' # type: str + self.cluster_size = cluster_size + self.time_to_live = time_to_live + self.version = version + self.linked_service_name = linked_service_name + self.host_subscription_id = host_subscription_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.cluster_resource_group = cluster_resource_group + self.cluster_name_prefix = cluster_name_prefix + self.cluster_user_name = cluster_user_name + self.cluster_password = cluster_password + self.cluster_ssh_user_name = cluster_ssh_user_name + 
self.cluster_ssh_password = cluster_ssh_password + self.additional_linked_service_names = additional_linked_service_names + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.cluster_type = cluster_type + self.spark_version = spark_version + self.core_configuration = core_configuration + self.h_base_configuration = h_base_configuration + self.hdfs_configuration = hdfs_configuration + self.hive_configuration = hive_configuration + self.map_reduce_configuration = map_reduce_configuration + self.oozie_configuration = oozie_configuration + self.storm_configuration = storm_configuration + self.yarn_configuration = yarn_configuration + self.encrypted_credential = encrypted_credential + self.head_node_size = head_node_size + self.data_node_size = data_node_size + self.zookeeper_node_size = zookeeper_node_size + self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google - Drive access enables support for federated tables that combine BigQuery data with data from - Google Drive. The default value is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. 
- :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression + with resultType array). + :type arguments: object + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. 
+ :type defines: dict[str, object] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } def __init__( self, *, - project: object, - authentication_type: Union[str, "GoogleBigQueryAuthenticationType"], + name: str, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - additional_projects: Optional[object] = None, - request_google_drive_scope: Optional[object] = None, - refresh_token: Optional["SecretBase"] = None, - client_id: Optional[object] = None, - client_secret: Optional["SecretBase"] = None, - email: Optional[object] = None, - key_file_path: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - encrypted_credential: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = 
None, + arguments: Optional[object] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + script_path: Optional[object] = None, + script_linked_service: Optional["LinkedServiceReference"] = None, + defines: Optional[Dict[str, object]] = None, **kwargs ): - super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'GoogleBigQuery' # type: str - self.project = project - self.additional_projects = additional_projects - self.request_google_drive_scope = request_google_drive_scope - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential + super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightPig' # type: str + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. 
- :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using database + table - properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type dataset: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. + Type: string (or Expression with resultType string). + :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of the code/package to + be executed. Type: string (or Expression with resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading the entry file and + dependencies, and for receiving logs. + :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. Type: string (or + Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. + :type spark_config: dict[str, object] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 
'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + name: str, + root_path: object, + entry_file_path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - dataset: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + spark_job_linked_service: Optional["LinkedServiceReference"] = None, + class_name: Optional[str] = None, + proxy_user: Optional[object] = None, + spark_config: Optional[Dict[str, object]] = None, **kwargs ): - super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'GoogleBigQueryObject' # type: str - self.table_name = table_name - self.table = table - self.dataset = dataset + super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightSpark' # type: str + self.root_path = root_path + self.entry_file_path = entry_file_path + self.arguments = arguments + self.get_debug_info = get_debug_info + self.spark_job_linked_service = spark_job_linked_service + self.class_name = class_name + self.proxy_user = proxy_user + self.spark_config = spark_config -class GoogleCloudStorageLinkedService(LinkedService): - """Linked service for Google Cloud Storage. +class HDInsightStreamingActivity(ExecutionActivity): + """HDInsight streaming activity type. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access - Management (IAM) user. Type: string (or Expression with resultType string). 
- :type access_key_id: object - :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access - Management (IAM) user. - :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Google Cloud Storage - Connector. This is an optional property; change it only if you want to try a different service - endpoint or want to switch between https and http. Type: string (or Expression with resultType + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :type mapper: object + :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType + string). + :type reducer: object + :param input: Required. Input blob path. Type: string (or Expression with resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression with resultType string). + :type output: object + :param file_paths: Required. Paths to streaming job files. Can be directories. + :type file_paths: list[object] + :param file_linked_service: Linked service reference where the files are located. + :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param combiner: Combiner executable name. Type: string (or Expression with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. 
+ :type defines: dict[str, object] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, } def __init__( self, *, + name: str, + mapper: object, + reducer: object, + input: object, + output: object, + file_paths: List[object], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - access_key_id: Optional[object] = None, - secret_access_key: Optional["SecretBase"] = None, - service_url: Optional[object] = None, - encrypted_credential: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + file_linked_service: Optional["LinkedServiceReference"] = None, + combiner: Optional[object] = None, + command_environment: Optional[List[object]] = None, + defines: Optional[Dict[str, object]] = None, **kwargs ): - super(GoogleCloudStorageLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'GoogleCloudStorage' # type: str - self.access_key_id = access_key_id - self.secret_access_key = secret_access_key - self.service_url = service_url - self.encrypted_credential = encrypted_credential + super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightStreaming' # type: str + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.mapper = mapper + self.reducer = reducer + self.input = input + self.output = output + self.file_paths = file_paths + self.file_linked_service = file_linked_service + self.combiner = combiner + self.command_environment = command_environment + self.defines = defines -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. +class HiveLinkedService(LinkedService): + """Hive Server linked service. All required parameters must be populated in order to send to Azure. @@ -9805,11 +17124,53 @@ class GreenplumLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param host: Required. IP address or host name of the Hive server, separated by ';' for + multiple hosts (only when serviceDiscoveryMode is enable). + :type host: object + :param port: The TCP port that the Hive server uses to listen for client connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: "HiveServer1", + "HiveServer2", "HiveThriftServer". + :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :type thrift_transport_protocol: str or + ~azure.synapse.artifacts.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to access the Hive server. + Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are + added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts + them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you provided in the Username + field. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -9818,6 +17179,8 @@ class GreenplumLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -9827,33 +17190,75 @@ class GreenplumLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + host: object, + authentication_type: Union[str, 
"HiveAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, + port: Optional[object] = None, + server_type: Optional[Union[str, "HiveServerType"]] = None, + thrift_transport_protocol: Optional[Union[str, "HiveThriftTransportProtocol"]] = None, + service_discovery_mode: Optional[object] = None, + zoo_keeper_name_space: Optional[object] = None, + use_native_query: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + http_path: Optional[object] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Greenplum' # type: str - self.connection_string = connection_string - self.pwd = pwd + super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Hive' # type: str + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.service_discovery_mode = service_discovery_mode + self.zoo_keeper_name_space = zoo_keeper_name_space + self.use_native_query = use_native_query + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert self.encrypted_credential = encrypted_credential -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. +class HiveObjectDataset(Dataset): + """Hive Server dataset. All required parameters must be populated in order to send to Azure. @@ -9882,9 +17287,9 @@ class GreenplumTableDataset(Dataset): :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression with resultType string). + :param table: The table name of the Hive. Type: string (or Expression with resultType string). :type table: object - :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression + :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression with resultType string). 
:type schema_type_properties_schema: object """ @@ -9925,15 +17330,72 @@ def __init__( schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'GreenplumTable' # type: str + super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'HiveObject' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema -class HBaseLinkedService(LinkedService): - """HBase server linked service. +class HiveSource(TabularSource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'HiveSource' # type: str + self.query = query + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. All required parameters must be populated in order to send to Azure. 
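For orientation, here is a minimal usage sketch for the Hive linked service model introduced in the hunk above (a sketch only, assuming the regenerated classes remain importable from azure.synapse.artifacts.models as the docstring references suggest; all values are placeholders):

    from azure.synapse.artifacts.models import HiveLinkedService

    # Constructors in the regenerated models are keyword-only; required
    # properties (host, authentication_type) must be passed explicitly,
    # while optional properties such as username or enable_ssl may be omitted.
    hive_linked_service = HiveLinkedService(
        host="hive.example.com",         # placeholder host name
        authentication_type="Username",  # one of the documented HiveAuthenticationType values
        username="hive-user",            # placeholder user name
        enable_ssl=True,
    )
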
@@ -9950,44 +17412,40 @@ class HBaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: object - :param port: The TCP port that the HBase instance uses to listen for client connections. The - default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version). - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use to connect to the - HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. + :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + string (or Expression with resultType string). + :type url: object + :param authentication_type: The authentication type to be used to connect to the HTTP server. + Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate with + EmbeddedCertData authentication. :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object + :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate + authentication. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only + valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS server SSL + certificate. Default value is true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -9997,300 +17455,209 @@ class HBaseLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, + 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, } def __init__( self, *, - host: object, - authentication_type: Union[str, "HBaseAuthenticationType"], + url: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - port: Optional[object] = None, - http_path: Optional[object] = None, - username: Optional[object] = None, + authentication_type: Optional[Union[str, "HttpAuthenticationType"]] = None, + user_name: Optional[object] = None, password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, + embedded_cert_data: Optional[object] = None, + cert_thumbprint: Optional[object] = None, encrypted_credential: Optional[object] = None, + enable_server_certificate_validation: Optional[object] = None, **kwargs ): - super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HBase' # type: str - self.host = host - self.port = port - self.http_path = http_path + super(HttpLinkedService, self).__init__(additional_properties=additional_properties, 
connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HttpServer' # type: str + self.url = url self.authentication_type = authentication_type - self.username = username + self.user_name = user_name self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.embedded_cert_data = embedded_cert_data + self.cert_thumbprint = cert_thumbprint self.encrypted_credential = encrypted_credential + self.enable_server_certificate_validation = enable_server_certificate_validation -class HBaseObjectDataset(Dataset): - """HBase server dataset. +class HttpReadSettings(StoreReadSettings): + """Sftp read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP + server. 
+ :type request_timeout: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + request_method: Optional[object] = None, + request_body: Optional[object] = None, + additional_headers: Optional[object] = None, + request_timeout: Optional[object] = None, **kwargs ): - super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'HBaseObject' # type: str - self.table_name = table_name + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HttpReadSettings' # type: str + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.request_timeout = request_timeout -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. +class HttpServerLocation(DatasetLocation): + """The location of http server. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. 
The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the HDFS. Possible values - are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or Expression with + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string). - :type user_name: object - :param password: Password for Windows authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :type relative_url: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, } def __init__( self, *, - url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - encrypted_credential: Optional[object] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + relative_url: Optional[object] = None, **kwargs ): - super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Hdfs' # type: str - self.url = url - self.authentication_type = authentication_type - self.encrypted_credential = encrypted_credential - self.user_name = user_name - self.password = password + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'HttpServerLocation' # type: str + self.relative_url = relative_url -class 
HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. +class HttpSource(CopySource): + """A copy activity source for an HTTP file. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster - is with ESP (Enterprise Security Package). - :type query_timeout: int + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from + HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[List[object]] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - script_path: Optional[object] = None, - script_linked_service: Optional["LinkedServiceReference"] = None, - defines: Optional[Dict[str, object]] = None, - variables: Optional[List[object]] = None, - query_timeout: Optional[int] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + http_request_timeout: Optional[object] = None, **kwargs ): - super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightHive' # type: str - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines - self.variables = variables - self.query_timeout = query_timeout + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HttpSource' # type: str + self.http_request_timeout = http_request_timeout -class HDInsightLinkedService(LinkedService): - 
"""HDInsight linked service. +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. All required parameters must be populated in order to send to Azure. @@ -10307,34 +17674,35 @@ class HDInsightLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to - the HCatalog database. - :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param client_id: Required. The client ID associated with your Hubspot application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot application. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param access_token: The access token obtained when initially authenticating your OAuth + integration. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param refresh_token: The refresh token obtained when initially authenticating your OAuth + integration. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security - Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. - Type: string (or Expression with resultType string). 
- :type file_system: object """ _validation = { 'type': {'required': True}, - 'cluster_uri': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -10344,399 +17712,173 @@ class HDInsightLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } def __init__( self, *, - cluster_uri: object, + client_id: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, + client_secret: Optional["SecretBase"] = None, + access_token: Optional["SecretBase"] = None, + refresh_token: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, encrypted_credential: Optional[object] = None, - is_esp_enabled: Optional[object] = None, - file_system: Optional[object] = None, **kwargs ): - super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HDInsight' # type: str - self.cluster_uri = cluster_uri - self.user_name = user_name - self.password = password - self.linked_service_name = linked_service_name - self.hcatalog_linked_service_name = hcatalog_linked_service_name + super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Hubspot' # type: str + self.client_id = client_id + self.client_secret = client_secret + self.access_token = access_token + self.refresh_token = refresh_token + self.use_encrypted_endpoints = 
use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential - self.is_esp_enabled = is_esp_enabled - self.file_system = file_system -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with resultType string). - :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, object] + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, *, - name: str, - class_name: object, - jar_file_path: object, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[List[object]] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - jar_linked_service: Optional["LinkedServiceReference"] = None, - jar_libs: Optional[List[object]] = None, - defines: Optional[Dict[str, object]] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, **kwargs ): - super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightMapReduce' # type: str - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.class_name = class_name - self.jar_file_path = jar_file_path - self.jar_linked_service = jar_linked_service - self.jar_libs = jar_libs - self.defines = defines + super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, 
structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'HubspotObject' # type: str + self.table_name = table_name -class HDInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. +class HubspotSource(TabularSource): + """A copy activity Hubspot Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. - Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. - Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity - run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string - (or Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand - cluster for storing and processing data. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: - string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: - string (or Expression with resultType string). - :type cluster_resource_group: object - :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with - timestamp. Type: string (or Expression with resultType string). - :type cluster_name_prefix: object - :param cluster_user_name: The username to access the cluster. Type: string (or Expression with - resultType string). - :type cluster_user_name: object - :param cluster_password: The password to access the cluster. 
- :type cluster_password: ~azure.synapse.artifacts.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for - Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: object - :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). - :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase - :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight - linked service so that the Data Factory service can register them on your behalf. - :type additional_linked_service_names: - list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the - HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database - as the metastore. - :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: object - :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or - Expression with resultType string). - :type spark_version: object - :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) - for the HDInsight cluster to be created. - :type core_configuration: object - :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for - the HDInsight cluster. - :type h_base_configuration: object - :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - HDInsight cluster. - :type hdfs_configuration: object - :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the - HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- - site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object - :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for - the HDInsight cluster. - :type oozie_configuration: object - :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for - the HDInsight cluster. - :type storm_configuration: object - :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the - HDInsight cluster. - :type yarn_configuration: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param head_node_size: Specifies the size of the head node for the HDInsight cluster. - :type head_node_size: object - :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: object - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight - cluster. - :type zookeeper_node_size: object - :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- - cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- - us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. 
- :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be - joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was - specified, then this property is required. Type: string (or Expression with resultType string). - :type subnet_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ - _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, + _validation = { + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 
'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - cluster_size: object, - time_to_live: object, - version: object, - linked_service_name: "LinkedServiceReference", - host_subscription_id: object, - tenant: object, - cluster_resource_group: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - cluster_name_prefix: Optional[object] = None, - cluster_user_name: Optional[object] = None, - cluster_password: Optional["SecretBase"] = None, - cluster_ssh_user_name: Optional[object] = None, - cluster_ssh_password: Optional["SecretBase"] = None, - additional_linked_service_names: Optional[List["LinkedServiceReference"]] = None, - hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, - cluster_type: Optional[object] = None, - spark_version: Optional[object] = None, - core_configuration: Optional[object] = None, - h_base_configuration: Optional[object] = None, - hdfs_configuration: Optional[object] = None, - hive_configuration: Optional[object] = None, - map_reduce_configuration: Optional[object] = None, - oozie_configuration: Optional[object] = None, - storm_configuration: Optional[object] = None, - yarn_configuration: Optional[object] = None, - encrypted_credential: Optional[object] = None, - head_node_size: Optional[object] = None, - data_node_size: Optional[object] = None, - zookeeper_node_size: Optional[object] = None, - script_actions: Optional[List["ScriptAction"]] = None, - virtual_network_id: 
Optional[object] = None, - subnet_name: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HDInsightOnDemand' # type: str - self.cluster_size = cluster_size - self.time_to_live = time_to_live - self.version = version - self.linked_service_name = linked_service_name - self.host_subscription_id = host_subscription_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.cluster_resource_group = cluster_resource_group - self.cluster_name_prefix = cluster_name_prefix - self.cluster_user_name = cluster_user_name - self.cluster_password = cluster_password - self.cluster_ssh_user_name = cluster_ssh_user_name - self.cluster_ssh_password = cluster_ssh_password - self.additional_linked_service_names = additional_linked_service_names - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.cluster_type = cluster_type - self.spark_version = spark_version - self.core_configuration = core_configuration - self.h_base_configuration = h_base_configuration - self.hdfs_configuration = hdfs_configuration - self.hive_configuration = hive_configuration - self.map_reduce_configuration = map_reduce_configuration - self.oozie_configuration = oozie_configuration - self.storm_configuration = storm_configuration - self.yarn_configuration = yarn_configuration - self.encrypted_credential = encrypted_credential - self.head_node_size = head_node_size - self.data_node_size = data_node_size - self.zookeeper_node_size = zookeeper_node_size - self.script_actions = script_actions - self.virtual_network_id = virtual_network_id - self.subnet_name = subnet_name + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'HubspotSource' # type: str + self.query = query -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. +class IfConditionActivity(Activity): + """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. All required parameters must be populated in order to send to Azure. @@ -10753,28 +17895,22 @@ class HDInsightPigActivity(ExecutionActivity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. 
Type: array (or Expression - with resultType array). - :type arguments: object - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] + :param expression: Required. An expression that would evaluate to Boolean. This is used to + determine the block of activities (ifTrueActivities or ifFalseActivities) that will be + executed. + :type expression: ~azure.synapse.artifacts.models.Expression + :param if_true_activities: List of activities to execute if expression is evaluated to true. + This is an optional property and if not provided, the activity will exit without any action. + :type if_true_activities: list[~azure.synapse.artifacts.models.Activity] + :param if_false_activities: List of activities to execute if expression is evaluated to false. + This is an optional property and if not provided, the activity will exit without any action. + :type if_false_activities: list[~azure.synapse.artifacts.models.Activity] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, + 'expression': {'required': True}, } _attribute_map = { @@ -10784,271 +17920,287 @@ class HDInsightPigActivity(ExecutionActivity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, + 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + } + + def __init__( + self, + *, + name: str, + expression: "Expression", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + if_true_activities: Optional[List["Activity"]] = None, + if_false_activities: Optional[List["Activity"]] = None, + **kwargs + ): + super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'IfCondition' # type: str + self.expression = expression + self.if_true_activities = if_true_activities + self.if_false_activities = if_false_activities + + +class 
ImpalaLinkedService(LinkedService): + """Impala server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the Impala server. (i.e. + 192.168.222.160). + :type host: object + :param port: The TCP port that the Impala server uses to listen for client connections. The + default value is 21050. + :type port: object + :param authentication_type: Required. The authentication type to use. Possible values include: + "Anonymous", "SASLUsername", "UsernameAndPassword". + :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType + :param username: The user name used to access the Impala server. The default value is anonymous + when using SASLUsername. + :type username: object + :param password: The password corresponding to the user name when using UsernameAndPassword. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - name: str, + host: object, + authentication_type: Union[str, "ImpalaAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[object] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - script_path: Optional[object] = None, - script_linked_service: Optional["LinkedServiceReference"] = None, - defines: Optional[Dict[str, object]] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightPig' # type: str - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines + super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) + self.type = 'Impala' # type: str + self.host = host + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. +class ImpalaObjectDataset(Dataset): + """Impala server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. - Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading the entry file and - dependencies, and for receiving logs. - :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. Type: string (or + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression with resultType + string). + :type table: object + :param schema_type_properties_schema: The schema name of the Impala. Type: string (or Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] + :type schema_type_properties_schema: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, *, - name: str, - root_path: object, - entry_file_path: object, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - arguments: Optional[List[object]] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - spark_job_linked_service: Optional["LinkedServiceReference"] = None, - class_name: Optional[str] = None, - proxy_user: Optional[object] = None, - spark_config: Optional[Dict[str, object]] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: 
Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightSpark' # type: str - self.root_path = root_path - self.entry_file_path = entry_file_path - self.arguments = arguments - self.get_debug_info = get_debug_info - self.spark_job_linked_service = spark_job_linked_service - self.class_name = class_name - self.proxy_user = proxy_user - self.spark_config = spark_config + super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ImpalaObject' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. +class ImpalaSource(TabularSource): + """A copy activity Impala server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType - string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
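
For orientation, the regenerated Impala models above can be wired together as in the following minimal sketch. It assumes the keyword-only constructors shown in this diff, plus the LinkedServiceReference and SecureString models from azure.synapse.artifacts.models (their exact constructors are not shown in this hunk); the host, credentials and reference names are purely illustrative.

from azure.synapse.artifacts.models import (
    ImpalaLinkedService,
    ImpalaObjectDataset,
    LinkedServiceReference,
    SecureString,
)

# Illustrative values only; SecureString(value=...) and
# LinkedServiceReference(type=..., reference_name=...) are assumed from the
# generated models rather than shown in this hunk.
impala_ls = ImpalaLinkedService(
    host="192.168.222.160",                     # required
    authentication_type="UsernameAndPassword",  # required; str or ImpalaAuthenticationType
    port=21050,
    username="reporting_user",
    password=SecureString(value="<placeholder>"),
    enable_ssl=True,
)

impala_ds = ImpalaObjectDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="ImpalaLinkedService1",  # hypothetical workspace artifact name
    ),
    schema_type_properties_schema="analytics",  # serialized as typeProperties.schema
    table="page_views",                         # serialized as typeProperties.table
)
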
+ :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] + :type query: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( - self, - *, - name: str, - mapper: object, - reducer: object, - input: object, - output: object, - file_paths: List[object], + self, + *, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = 
None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[List[object]] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - file_linked_service: Optional["LinkedServiceReference"] = None, - combiner: Optional[object] = None, - command_environment: Optional[List[object]] = None, - defines: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'HDInsightStreaming' # type: str - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.mapper = mapper - self.reducer = reducer - self.input = input - self.output = output - self.file_paths = file_paths - self.file_linked_service = file_linked_service - self.combiner = combiner - self.command_environment = command_environment - self.defines = defines + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ImpalaSource' # type: str + self.query = query -class HiveLinkedService(LinkedService): - """Hive Server linked service. +class InformixLinkedService(LinkedService): + """Informix linked service. All required parameters must be populated in order to send to Azure. @@ -11065,53 +18217,22 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: "HiveServer1", - "HiveServer2", "HiveThriftServer". - :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.synapse.artifacts.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Hive server. - Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. 
- :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are - added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts - them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the Informix as ODBC data + store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType + string). + :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~azure.synapse.artifacts.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Basic authentication. :type password: ~azure.synapse.artifacts.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -11120,8 +18241,7 @@ class HiveLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { @@ -11131,75 +18251,160 @@ class HiveLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - host: object, - authentication_type: Union[str, "HiveAuthenticationType"], + connection_string: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - port: Optional[object] = None, - server_type: Optional[Union[str, "HiveServerType"]] = None, - thrift_transport_protocol: Optional[Union[str, "HiveThriftTransportProtocol"]] = None, - service_discovery_mode: Optional[object] = None, - zoo_keeper_name_space: Optional[object] = None, - use_native_query: Optional[object] = None, - username: Optional[object] = None, + authentication_type: Optional[object] = None, + credential: Optional["SecretBase"] = None, + user_name: Optional[object] = None, password: Optional["SecretBase"] = None, - http_path: Optional[object] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(HiveLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Hive' # type: str - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Informix' # type: str + self.connection_string = connection_string self.authentication_type = authentication_type - self.service_discovery_mode = service_discovery_mode - self.zoo_keeper_name_space = zoo_keeper_name_space - self.use_native_query = use_native_query - self.username = username + self.credential = credential + self.user_name = user_name self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert self.encrypted_credential = encrypted_credential -class HiveObjectDataset(Dataset): - """Hive Server dataset. +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
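
A minimal sketch of the new InformixLinkedService, assuming the keyword-only constructor shown above and a SecureString secret (constructor assumed); the ODBC connection string and user name are placeholders.

from azure.synapse.artifacts.models import InformixLinkedService, SecureString

informix_ls = InformixLinkedService(
    # Hypothetical DSN-less ODBC string; only the non-credential portion goes here.
    connection_string="Driver={IBM INFORMIX ODBC DRIVER};Host=ifxhost;Service=9088;Database=stores",
    authentication_type="Basic",
    user_name="informix",
    password=SecureString(value="<placeholder>"),
)
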
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'InformixSink' # type: str + self.pre_copy_script = pre_copy_script + + +class InformixSource(TabularSource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'InformixSource' # type: str + self.query = query + + +class InformixTableDataset(Dataset): + """The Informix table dataset. All required parameters must be populated in order to send to Azure. @@ -11225,14 +18430,9 @@ class HiveObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The Informix table name. Type: string (or Expression with resultType + string). :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression - with resultType string). 
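
The matching copy-activity source and sink added above could be instantiated as follows; the query, script and numeric values are illustrative, and the timespan string follows the pattern given in the docstrings.

from azure.synapse.artifacts.models import InformixSink, InformixSource

informix_source = InformixSource(
    query="SELECT * FROM customer",  # optional database query
    query_timeout="02:00:00",        # ((\d+).)?(\d\d):(mm):(ss) pattern
)
informix_sink = InformixSink(
    pre_copy_script="DELETE FROM customer_staging",  # runs before the copy starts
    write_batch_size=1000,
)
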
- :type schema_type_properties_schema: object """ _validation = { @@ -11251,8 +18451,6 @@ class HiveObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( @@ -11267,352 +18465,488 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'HiveObject' # type: str + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'InformixTable' # type: str self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. +class IntegrationRuntime(msrest.serialization.Model): + """Azure Synapse nested object which serves as a compute resource for activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: - string (or Expression with resultType string). - :type url: object - :param authentication_type: The authentication type to be used to connect to the HTTP server. - Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". - :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or - Expression with resultType string). 
- :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate with - EmbeddedCertData authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate - authentication. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only - valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS server SSL - certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'IntegrationRuntime' # type: str + self.description = description + + +class IntegrationRuntimeComputeProperties(msrest.serialization.Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. The supported regions could be + found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement- + activities. + :type location: str + :param node_size: The node size requirement to managed integration runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed + integration runtime. + :type max_parallel_executions_per_node: int + :param data_flow_properties: Data flow properties for managed integration runtime. + :type data_flow_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeDataFlowProperties + :param v_net_properties: VNet properties for managed integration runtime. 
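
The InformixTableDataset introduced above pairs with those source and sink types; a small sketch, again assuming a LinkedServiceReference constructor taking type and reference_name, with a hypothetical reference name.

from azure.synapse.artifacts.models import InformixTableDataset, LinkedServiceReference

informix_table = InformixTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="InformixLinkedService1",  # hypothetical
    ),
    table_name="customer",  # Informix table name
)
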
+ :type v_net_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + location: Optional[str] = None, + node_size: Optional[str] = None, + number_of_nodes: Optional[int] = None, + max_parallel_executions_per_node: Optional[int] = None, + data_flow_properties: Optional["IntegrationRuntimeDataFlowProperties"] = None, + v_net_properties: Optional["IntegrationRuntimeVNetProperties"] = None, + **kwargs + ): + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.location = location + self.node_size = node_size + self.number_of_nodes = number_of_nodes + self.max_parallel_executions_per_node = max_parallel_executions_per_node + self.data_flow_properties = data_flow_properties + self.v_net_properties = v_net_properties + + +class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that contains the custom setup + script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.synapse.artifacts.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__( + self, + *, + blob_container_uri: Optional[str] = None, + sas_token: Optional["SecureString"] = None, + **kwargs + ): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = blob_container_uri + self.sas_token = sas_token + + +class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): + """Data flow properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. + :type core_count: int + :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data + flow job. 
+ :type time_to_live: int + """ + + _validation = { + 'time_to_live': {'minimum': 0}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, + core_count: Optional[int] = None, + time_to_live: Optional[int] = None, + **kwargs + ): + super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.compute_type = compute_type + self.core_count = core_count + self.time_to_live = time_to_live + + +class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: ~azure.synapse.artifacts.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + *, + connect_via: Optional["EntityReference"] = None, + staging_linked_service: Optional["EntityReference"] = None, + path: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path + + +class IntegrationRuntimeListResponse(msrest.serialization.Model): + """A list of integration runtime resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtimes. + :type value: list[~azure.synapse.artifacts.models.IntegrationRuntimeResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["IntegrationRuntimeResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntimeListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class IntegrationRuntimeReference(msrest.serialization.Model): + """Integration runtime reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of integration runtime. Possible values include: + "IntegrationRuntimeReference". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. 
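
The managed integration runtime compute settings above nest the data-flow properties; a small sketch using the constructors shown in this hunk (node size, counts and TTL are illustrative, and the enum strings come from the docstrings).

from azure.synapse.artifacts.models import (
    IntegrationRuntimeComputeProperties,
    IntegrationRuntimeDataFlowProperties,
)

compute_props = IntegrationRuntimeComputeProperties(
    location="AutoResolve",             # placeholder region value
    node_size="Standard_D8_v3",         # placeholder node size
    number_of_nodes=1,                  # minimum 1 per the validation map
    max_parallel_executions_per_node=4,
    data_flow_properties=IntegrationRuntimeDataFlowProperties(
        compute_type="General",
        core_count=8,                   # supported values listed in the docstring
        time_to_live=10,                # minutes
    ),
)
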
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + type: Union[str, "IntegrationRuntimeReferenceType"], + reference_name: str, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + self.parameters = parameters + + +class IntegrationRuntimeResource(AzureEntityResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.synapse.artifacts.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, } def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[Union[str, "HttpAuthenticationType"]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - embedded_cert_data: Optional[object] = None, - cert_thumbprint: Optional[object] = None, - encrypted_credential: Optional[object] = None, - enable_server_certificate_validation: 
Optional[object] = None, + properties: "IntegrationRuntime", **kwargs ): - super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'HttpServer' # type: str - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.embedded_cert_data = embedded_cert_data - self.cert_thumbprint = cert_thumbprint - self.encrypted_credential = encrypted_credential - self.enable_server_certificate_validation = enable_server_certificate_validation - + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = properties -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. - All required parameters must be populated in order to send to Azure. +class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): + """Catalog information for managed dedicated integration runtime. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param access_token: The access token obtained when initially authenticating your OAuth - integration. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param refresh_token: The refresh token obtained when initially authenticating your OAuth - integration. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user account of the catalog + database. 
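
References to a runtime are expressed with the IntegrationRuntimeReference model defined above; both arguments are required, and the runtime name below is only an example.

from azure.synapse.artifacts.models import IntegrationRuntimeReference

ir_reference = IntegrationRuntimeReference(
    type="IntegrationRuntimeReference",
    reference_name="AutoResolveIntegrationRuntime",  # example runtime name
)
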
+ :type catalog_admin_password: ~azure.synapse.artifacts.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values + include: "Basic", "Standard", "Premium", "PremiumRS". + :type catalog_pricing_tier: str or + ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogPricingTier """ _validation = { - 'type': {'required': True}, - 'client_id': {'required': True}, + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, } def __init__( self, *, - client_id: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - client_secret: Optional["SecretBase"] = None, - access_token: Optional["SecretBase"] = None, - refresh_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + catalog_server_endpoint: Optional[str] = None, + catalog_admin_user_name: Optional[str] = None, + catalog_admin_password: Optional["SecureString"] = None, + catalog_pricing_tier: Optional[Union[str, "IntegrationRuntimeSsisCatalogPricingTier"]] = None, **kwargs ): - super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Hubspot' # type: str - self.client_id = client_id - self.client_secret = client_secret - self.access_token = access_token - self.refresh_token = refresh_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential - + super(IntegrationRuntimeSsisCatalogInfo, 
self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_server_endpoint = catalog_server_endpoint + self.catalog_admin_user_name = catalog_admin_user_name + self.catalog_admin_password = catalog_admin_password + self.catalog_pricing_tier = catalog_pricing_tier -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. - All required parameters must be populated in order to send to Azure. +class IntegrationRuntimeSsisProperties(msrest.serialization.Model): + """SSIS properties for managed integration runtime. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param catalog_info: Catalog information for managed dedicated integration runtime. + :type catalog_info: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :type license_type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :type custom_setup_script_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :type data_proxy_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". + :type edition: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEdition + :param express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. 
+ :type express_custom_setup_properties: list[~azure.synapse.artifacts.models.CustomSetupBase] """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, + 'edition': {'key': 'edition', 'type': 'str'}, + 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + catalog_info: Optional["IntegrationRuntimeSsisCatalogInfo"] = None, + license_type: Optional[Union[str, "IntegrationRuntimeLicenseType"]] = None, + custom_setup_script_properties: Optional["IntegrationRuntimeCustomSetupScriptProperties"] = None, + data_proxy_properties: Optional["IntegrationRuntimeDataProxyProperties"] = None, + edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None, + express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None, **kwargs ): - super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'HubspotObject' # type: str - self.table_name = table_name - + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_info = catalog_info + self.license_type = license_type + self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties + self.edition = edition + self.express_custom_setup_properties = express_custom_setup_properties -class IfConditionActivity(Activity): - """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. - All required parameters must be populated in order to send to Azure. +class IntegrationRuntimeVNetProperties(msrest.serialization.Model): + """VNet properties for managed integration runtime. 
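A short sketch of how the SSIS catalog settings above could be assembled, assuming SecureString follows the same shape as the other secret models in this file (a required value keyword):

    from azure.synapse.artifacts.models import (
        IntegrationRuntimeSsisCatalogInfo,
        IntegrationRuntimeSsisProperties,
        SecureString,
    )

    # Catalog hosted in an Azure SQL Database; the endpoint and user name are placeholders.
    catalog = IntegrationRuntimeSsisCatalogInfo(
        catalog_server_endpoint="contoso.database.windows.net",
        catalog_admin_user_name="ssisadmin",
        catalog_admin_password=SecureString(value="<admin-password>"),  # assumed SecureString(value=...)
        catalog_pricing_tier="Basic",
    )

    ssis_properties = IntegrationRuntimeSsisProperties(
        catalog_info=catalog,
        license_type="LicenseIncluded",
        edition="Standard",
    )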
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. This is used to - determine the block of activities (ifTrueActivities or ifFalseActivities) that will be - executed. - :type expression: ~azure.synapse.artifacts.models.Expression - :param if_true_activities: List of activities to execute if expression is evaluated to true. - This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~azure.synapse.artifacts.models.Activity] - :param if_false_activities: List of activities to execute if expression is evaluated to false. - This is an optional property and if not provided, the activity will exit without any action. - :type if_false_activities: list[~azure.synapse.artifacts.models.Activity] + :param v_net_id: The ID of the VNet that this integration runtime will join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will + use. + :type public_i_ps: list[str] """ - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'expression': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, } def __init__( self, *, - name: str, - expression: "Expression", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - if_true_activities: Optional[List["Activity"]] = None, - if_false_activities: Optional[List["Activity"]] = None, + v_net_id: Optional[str] = None, + subnet: Optional[str] = None, + public_i_ps: Optional[List[str]] = None, **kwargs ): - super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'IfCondition' # type: str - self.expression = expression - self.if_true_activities = if_true_activities - self.if_false_activities = if_false_activities + super(IntegrationRuntimeVNetProperties, 
self).__init__(**kwargs) + self.additional_properties = additional_properties + self.v_net_id = v_net_id + self.subnet = subnet + self.public_i_ps = public_i_ps -class ImpalaLinkedService(LinkedService): - """Impala server linked service. +class JiraLinkedService(LinkedService): + """Jira Service linked service. All required parameters must be populated in order to send to Azure. @@ -11628,37 +18962,28 @@ class ImpalaLinkedService(LinkedService): :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Impala server uses to listen for client connections. The - default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The default value is anonymous - when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object + :type annotations: list[object] + :param host: Required. The IP address or host name of the Jira service. (e.g. + jira.example.com). + :type host: object + :param port: The TCP port that the Jira server uses to listen for client connections. The + default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira Service. + :type username: object + :param password: The password corresponding to the user name that you provided in the username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. 
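The VNet injection settings defined just above take plain resource identifiers; the values in this sketch are placeholders:

    from azure.synapse.artifacts.models import IntegrationRuntimeVNetProperties

    vnet_properties = IntegrationRuntimeVNetProperties(
        v_net_id="<vnet-resource-id>",
        subnet="default",
        public_i_ps=["<public-ip-resource-id>"],
    )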
+ :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -11668,7 +18993,7 @@ class ImpalaLinkedService(LinkedService): _validation = { 'type': {'required': True}, 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'username': {'required': True}, } _attribute_map = { @@ -11680,14 +19005,11 @@ class ImpalaLinkedService(LinkedService): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -11695,40 +19017,34 @@ def __init__( self, *, host: object, - authentication_type: Union[str, "ImpalaAuthenticationType"], + username: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, port: Optional[object] = None, - username: Optional[object] = None, password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Impala' # type: str + super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, 
parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Jira' # type: str self.host = host self.port = port - self.authentication_type = authentication_type self.username = username self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential -class ImpalaObjectDataset(Dataset): - """Impala server dataset. +class JiraObjectDataset(Dataset): + """Jira Service dataset. All required parameters must be populated in order to send to Azure. @@ -11754,15 +19070,8 @@ class ImpalaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Impala. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -11781,8 +19090,6 @@ class ImpalaObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( @@ -11797,105 +19104,72 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ImpalaObject' # type: str + super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'JiraObject' # type: str self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema -class InformixLinkedService(LinkedService): - """Informix linked service. +class JiraSource(TabularSource): + """A copy activity Jira Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. 
Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Informix as ODBC data - store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
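A sketch of the Jira models above, pairing the linked service with a dataset that points at it, assuming LinkedServiceReference and SecureString keep the constructor shapes used elsewhere in this file:

    from azure.synapse.artifacts.models import (
        JiraLinkedService,
        JiraObjectDataset,
        LinkedServiceReference,
        SecureString,
    )

    # Linked service: host and username are required; port defaults server-side
    # (443 over HTTPS, 8080 over HTTP).
    jira_ls = JiraLinkedService(
        host="jira.example.com",
        username="build-bot",
        password=SecureString(value="<api-token>"),  # assumed SecureString(value=...)
    )

    # Dataset referencing the linked service by name; the reference name is a placeholder.
    jira_issues = JiraObjectDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="JiraLinkedService1",
        ),
        table_name="Issues",
    )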
- :type encrypted_credential: object + :type query: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Informix' # type: str - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'JiraSource' # type: str + self.query = query -class InformixTableDataset(Dataset): - """The Informix table dataset. +class JsonDataset(Dataset): + """Json dataset. All required parameters must be populated in order to send to Azure. @@ -11921,333 +19195,425 @@ class InformixTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Informix table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param location: The location of the json data storage. 
+ :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + encoding_name: Optional[object] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Json' # type: str + self.location = location + self.encoding_name = encoding_name + self.compression = compression + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + :param file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". + :type file_pattern: str or ~azure.synapse.artifacts.models.JsonFormatFilePattern + :param nesting_separator: The character used to separate nesting levels. Default value is '.' 
+ (dot). Type: string (or Expression with resultType string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not provided, the + default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. + The full list of supported values can be found in the 'Name' column of the table of encodings + in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). + :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column mapping with a customized + column name to extract data from JSON file. For fields under root object, start with "$"; for + fields inside the array chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or + Expression with resultType object). + :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None, + nesting_separator: Optional[object] = None, + encoding_name: Optional[object] = None, + json_node_reference: Optional[object] = None, + json_path_definition: Optional[object] = None, + **kwargs + ): + super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'JsonFormat' # type: str + self.file_pattern = file_pattern + self.nesting_separator = nesting_separator + self.encoding_name = encoding_name + self.json_node_reference = json_node_reference + self.json_path_definition = json_path_definition + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
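As a sketch, the new JsonDataset and the legacy JsonFormat shown above could be constructed like this, again assuming the LinkedServiceReference constructor used elsewhere in this file:

    from azure.synapse.artifacts.models import (
        JsonDataset,
        JsonFormat,
        LinkedServiceReference,
    )

    # Dataset over JSON files; location and compression are optional and omitted here.
    json_ds = JsonDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="AzureBlobStorage1",  # placeholder linked service name
        ),
        encoding_name="UTF-8",
    )

    # Legacy storage-format variant: a set of concatenated JSON objects rather than one array.
    legacy_format = JsonFormat(
        file_pattern="setOfObjects",
        nesting_separator=".",
        encoding_name="utf-8",
        json_node_reference="$.items",
    )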
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.synapse.artifacts.models.JsonWriteSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["JsonWriteSettings"] = None, **kwargs ): - super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'InformixTable' # type: str - self.table_name = table_name + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'JsonSink' # type: str + self.store_settings = store_settings + self.format_settings = format_settings -class IntegrationRuntimeReference(msrest.serialization.Model): - """Integration runtime reference 
type. +class JsonSource(CopySource): + """A copy activity Json source. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of integration runtime. Possible values include: - "IntegrationRuntimeReference". - :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType - :param reference_name: Required. Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { 'type': {'required': True}, - 'reference_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, *, - type: Union[str, "IntegrationRuntimeReferenceType"], - reference_name: str, - parameters: Optional[Dict[str, object]] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.type = type - self.reference_name = reference_name - self.parameters = parameters + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'JsonSource' # type: str + self.store_settings = store_settings -class JiraLinkedService(LinkedService): - """Jira Service linked service. +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Jira service. (e.g. - jira.example.com). - :type host: object - :param port: The TCP port that the Jira server uses to listen for client connections. The - default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". 
+ :type file_pattern: str or ~azure.synapse.artifacts.models.JsonWriteFilePattern """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, } def __init__( self, *, - host: object, - username: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None, **kwargs ): - super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Jira' # type: str - self.host = host - self.port = port - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'JsonWriteSettings' # type: str + self.file_pattern = file_pattern -class JiraObjectDataset(Dataset): - """Jira Service dataset. +class LibraryRequirements(msrest.serialization.Model): + """Library requirements for a Big Data pool powered by Apache Spark. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. 
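The write settings above plug into the JSON sink; a minimal pairing (every parameter is optional, so store settings are omitted here):

    from azure.synapse.artifacts.models import JsonSink, JsonWriteSettings

    json_sink = JsonSink(
        format_settings=JsonWriteSettings(file_pattern="arrayOfObjects"),
        max_concurrent_connections=4,
    )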
- :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :ivar time: The last update time of the library requirements file. + :vartype time: ~datetime.datetime + :param content: The library requirements. + :type content: str + :param filename: The filename of the library requirements file. + :type filename: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'time': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'content': {'key': 'content', 'type': 'str'}, + 'filename': {'key': 'filename', 'type': 'str'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + content: Optional[str] = None, + filename: Optional[str] = None, **kwargs ): - super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'JiraObject' # type: str - self.table_name = table_name + super(LibraryRequirements, self).__init__(**kwargs) + self.time = None + self.content = content + self.filename = filename -class JsonDataset(Dataset): - """Json dataset. +class LinkedIntegrationRuntimeType(msrest.serialization.Model): + """The base definition of a linked integration runtime. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization. All required parameters must be populated in order to send to Azure. 
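The LibraryRequirements model above is a plain value object; a minimal sketch:

    from azure.synapse.artifacts.models import LibraryRequirements

    requirements = LibraryRequirements(
        filename="requirements.txt",
        content="numpy==1.19.4\npandas==1.1.5",
    )
    # `time` is read-only and remains None until the service populates it.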
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the - default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column - of the table in the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + """ + + _validation = { + 'authorization_type': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + } + + _subtype_map = { + 'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'} + } + + def __init__( + self, + **kwargs + ): + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + self.authorization_type = None # type: Optional[str] + + +class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): + """The key authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param key: Required. The key used for authorization. 
+ :type key: ~azure.synapse.artifacts.models.SecureString """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'authorization_type': {'required': True}, + 'key': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'SecureString'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - encoding_name: Optional[object] = None, - compression: Optional["DatasetCompression"] = None, + key: "SecureString", **kwargs ): - super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Json' # type: str - self.location = location - self.encoding_name = encoding_name - self.compression = compression + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.authorization_type = 'Key' # type: str + self.key = key + + +class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): + """The role based access control (RBAC) authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration runtime to be shared. + :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_id: str, + **kwargs + ): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.authorization_type = 'RBAC' # type: str + self.resource_id = resource_id class LinkedServiceDebugResource(SubResourceDebugResource): @@ -12352,20 +19718,22 @@ def __init__( self.parameters = parameters -class LinkedServiceResource(SubResource): +class LinkedServiceResource(AzureEntityResource): """Linked service resource type. 
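The two authorization subtypes above fix authorization_type ('Key' / 'RBAC') in their constructors; a sketch, assuming SecureString takes a value keyword as elsewhere in this file:

    from azure.synapse.artifacts.models import (
        LinkedIntegrationRuntimeKeyAuthorization,
        LinkedIntegrationRuntimeRbacAuthorization,
        SecureString,
    )

    key_auth = LinkedIntegrationRuntimeKeyAuthorization(
        key=SecureString(value="<authorization-key>"),  # assumed SecureString(value=...)
    )

    rbac_auth = LinkedIntegrationRuntimeRbacAuthorization(
        resource_id=(
            "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
            "Microsoft.Synapse/workspaces/<ws>/integrationRuntimes/<ir>"  # placeholder resource id
        ),
    )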
Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of linked service. :type properties: ~azure.synapse.artifacts.models.LinkedService @@ -12662,6 +20030,155 @@ def __init__( self.table_name = table_name +class MagentoSource(TabularSource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MagentoSource' # type: str + self.query = query + + +class ManagedIdentity(msrest.serialization.Model): + """The workspace managed identity. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the workspace managed identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the workspace managed identity. + :vartype tenant_id: str + :param type: The type of managed identity for the workspace. Possible values include: "None", + "SystemAssigned". + :type type: str or ~azure.synapse.artifacts.models.ResourceIdentityType + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "ResourceIdentityType"]] = None, + **kwargs + ): + super(ManagedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. + Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", + "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". + :vartype state: str or ~azure.synapse.artifacts.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration runtime. + :type compute_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. 
+ :type ssis_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisProperties + """ + + _validation = { + 'type': {'required': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + compute_properties: Optional["IntegrationRuntimeComputeProperties"] = None, + ssis_properties: Optional["IntegrationRuntimeSsisProperties"] = None, + **kwargs + ): + super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.type = 'Managed' # type: str + self.state = None + self.compute_properties = compute_properties + self.ssis_properties = ssis_properties + + class MappingDataFlow(DataFlow): """Mapping data flow. @@ -12786,6 +20303,63 @@ def __init__( self.encrypted_credential = encrypted_credential +class MariaDBSource(TabularSource): + """A copy activity MariaDB server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
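# A minimal sketch of constructing the ManagedIntegrationRuntime model defined above,
# assuming import from azure.synapse.artifacts.models; compute_properties and
# ssis_properties are optional and omitted here, `type` is fixed to 'Managed' by the
# constructor, and `state` remains server-populated (read-only).
from azure.synapse.artifacts.models import ManagedIntegrationRuntime

managed_ir = ManagedIntegrationRuntime(
    description="Managed IR used by copy activities",  # illustrative description
)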
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MariaDBSource' # type: str + self.query = query + + class MariaDBTableDataset(Dataset): """MariaDB server dataset. @@ -13007,9 +20581,66 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MarketoObject' # type: str - self.table_name = table_name + super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MarketoObject' # type: str + self.table_name = table_name + + +class MarketoSource(TabularSource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MarketoSource' # type: str + self.query = query class MicrosoftAccessLinkedService(LinkedService): @@ -13098,6 +20729,119 @@ def __init__( self.encrypted_credential = encrypted_credential +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MicrosoftAccessSink' # type: str + self.pre_copy_script = pre_copy_script + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MicrosoftAccessSource' # type: str + self.query = query + + class MicrosoftAccessTableDataset(Dataset): """The Microsoft Access table dataset. @@ -13237,6 +20981,55 @@ def __init__( self.collection_name = collection_name +class MongoDbCursorMethodsProperties(msrest.serialization.Model): + """Cursor methods for Mongodb query. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match the query filter. To + return all fields in the matching documents, omit this parameter. Type: string (or Expression + with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching documents. Type: string + (or Expression with resultType string). Type: string (or Expression with resultType string). + :type sort: object + :param skip: Specifies the how many documents skipped and where MongoDB begins returning + results. This approach may be useful in implementing paginated results. Type: integer (or + Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server returns. limit() is + analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with + resultType integer). + :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + project: Optional[object] = None, + sort: Optional[object] = None, + skip: Optional[object] = None, + limit: Optional[object] = None, + **kwargs + ): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit + + class MongoDbLinkedService(LinkedService): """Linked service for MongoDb data source. @@ -13346,6 +21139,58 @@ def __init__( self.encrypted_credential = encrypted_credential +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. 
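# A minimal sketch of the MongoDbCursorMethodsProperties model defined above, assuming
# import from azure.synapse.artifacts.models; each field accepts a literal or an
# Expression, so plain JSON-style strings and integers are used here.
from azure.synapse.artifacts.models import MongoDbCursorMethodsProperties

cursor_methods = MongoDbCursorMethodsProperties(
    project='{"name": 1, "createdAt": 1, "_id": 0}',  # fields to return
    sort='{"createdAt": -1}',                         # newest documents first
    skip=0,                                           # start from the first match
    limit=100,                                        # cap the number of documents returned
)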
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbSource' # type: str + self.query = query + + class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. @@ -13477,112 +21322,76 @@ def __init__( self.database = database -class Trigger(msrest.serialization.Model): - """Azure Synapse nested object which contains information about creating pipeline run. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MultiplePipelineTrigger, RerunTumblingWindowTrigger. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. 
- :type annotations: list[object] - """ - - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - } - - _subtype_map = { - 'type': {'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - annotations: Optional[List[object]] = None, - **kwargs - ): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'Trigger' # type: str - self.description = description - self.runtime_state = None - self.annotations = annotations - - -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to pipeline. - - Variables are only populated by the server, and will be ignored when sending a request. +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. This property's main purpose is to avoid hit the limitation of response size. + Type: integer (or Expression with resultType integer). 
+ :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object """ _validation = { 'type': {'required': True}, - 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - annotations: Optional[List[object]] = None, - pipelines: Optional[List["TriggerPipelineReference"]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, **kwargs ): - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type = 'MultiplePipelineTrigger' # type: str - self.pipelines = pipelines + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbV2Source' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout class MySqlLinkedService(LinkedService): @@ -13650,6 +21459,62 @@ def __init__( self.encrypted_credential = encrypted_credential +class MySqlSource(TabularSource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
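# A minimal sketch wiring cursor methods into the MongoDbV2Source defined above,
# assuming imports from azure.synapse.artifacts.models; `filter` takes a MongoDB query
# document as a string, and batch_size / query_timeout follow the documented patterns.
from azure.synapse.artifacts.models import (
    MongoDbCursorMethodsProperties,
    MongoDbV2Source,
)

mongo_source = MongoDbV2Source(
    filter='{"status": "active"}',  # selection filter; {} would return all documents
    cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100,
    query_timeout="02:00:00",       # hh:mm:ss pattern from the docstring
)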
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MySqlSource' # type: str + self.query = query + + class MySqlTableDataset(Dataset): """The MySQL table dataset. @@ -13783,6 +21648,110 @@ def __init__( self.encrypted_credential = encrypted_credential +class NetezzaPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Netezza source partitioning. + + :param partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + +class NetezzaSource(TabularSource): + """A copy activity Netezza source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for Netezza read in + parallel. Possible values include: "None", "DataSlice", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.NetezzaPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + partition_option: Optional[Union[str, "NetezzaPartitionOption"]] = None, + partition_settings: Optional["NetezzaPartitionSettings"] = None, + **kwargs + ): + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'NetezzaSource' # type: str + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + + class NetezzaTableDataset(Dataset): """Netezza dataset. @@ -14182,20 +22151,22 @@ def __init__( self.language_info = language_info -class NotebookResource(SubResource): +class NotebookResource(AzureEntityResource): """Notebook resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. 
:vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of Notebook. :type properties: ~azure.synapse.artifacts.models.Notebook @@ -14470,6 +22441,58 @@ def __init__( self.path = path +class ODataSource(CopySource): + """A copy activity source for OData source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ODataSource' # type: str + self.query = query + + class OdbcLinkedService(LinkedService): """Open Database Connectivity (ODBC) linked service. 
@@ -14511,48 +22534,166 @@ class OdbcLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + credential: Optional["SecretBase"] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Odbc' # type: str + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + + +class OdbcSink(CopySink): + """A copy activity ODBC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OdbcSink' # type: str + self.pre_copy_script = pre_copy_script + + +class OdbcSource(TabularSource): + """A copy activity source for ODBC databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
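# A minimal sketch of the OdbcSink defined above, assuming import from
# azure.synapse.artifacts.models; pre_copy_script runs once before the copy starts,
# and the batch settings follow the documented integer and hh:mm:ss patterns.
from azure.synapse.artifacts.models import OdbcSink

odbc_sink = OdbcSink(
    pre_copy_script="TRUNCATE TABLE staging_orders",  # illustrative pre-copy cleanup
    write_batch_size=1000,
    write_batch_timeout="00:30:00",
)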
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'Odbc' # type: str - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'OdbcSource' # type: str + self.query = query class OdbcTableDataset(Dataset): @@ -14780,6 +22921,89 @@ def __init__( self.encrypted_credential = encrypted_credential +class Office365Source(CopySource): + """A copy activity source for an Office 365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param allowed_groups: The groups containing all the users. Type: array of strings (or + Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType + string). + :type user_scope_filter_uri: object + :param date_filter_column: The Column to apply the :code:`` and + :code:``. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. Type: string (or + Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: string (or Expression + with resultType string). + :type end_time: object + :param output_columns: The columns to be read out from the Office 365 table. Type: array of + objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { + "name": "CreatedDateTime" } ]. + :type output_columns: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + 'output_columns': {'key': 'outputColumns', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + allowed_groups: Optional[object] = None, + user_scope_filter_uri: Optional[object] = None, + date_filter_column: Optional[object] = None, + start_time: Optional[object] = None, + end_time: Optional[object] = None, + output_columns: Optional[object] = None, + **kwargs + ): + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'Office365Source' # type: str + self.allowed_groups = allowed_groups + self.user_scope_filter_uri = user_scope_filter_uri + self.date_filter_column = date_filter_column + self.start_time = start_time + self.end_time = end_time + self.output_columns = output_columns + + class OracleLinkedService(LinkedService): """Oracle database. 
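# A minimal sketch of the Office365Source defined above, assuming import from
# azure.synapse.artifacts.models; allowed_groups and output_columns take JSON-style
# values per the docstring, and the date filter bounds the requested range.
from azure.synapse.artifacts.models import Office365Source

office_source = Office365Source(
    allowed_groups=["finance@contoso.com"],  # illustrative group
    date_filter_column="CreatedDateTime",
    start_time="2020-01-01T00:00:00Z",
    end_time="2020-06-30T00:00:00Z",
    output_columns=[{"name": "Id"}, {"name": "CreatedDateTime"}],  # example from the docstring
)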
@@ -14846,6 +23070,47 @@ def __init__( self.encrypted_credential = encrypted_credential +class OraclePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_names: Optional[object] = None, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + class OracleServiceCloudLinkedService(LinkedService): """Oracle Service Cloud linked service. @@ -15002,9 +23267,197 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'OracleServiceCloudObject' # type: str - self.table_name = table_name + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'OracleServiceCloudObject' # type: str + self.table_name = table_name + + +class OracleServiceCloudSource(TabularSource): + """A copy activity Oracle Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'OracleServiceCloudSource' # type: str + self.query = query + + +class OracleSink(CopySink): + """A copy activity Oracle sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OracleSink' # type: str + self.pre_copy_script = pre_copy_script + + +class OracleSource(CopySource): + """A copy activity Oracle source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for Oracle read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.OraclePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + oracle_reader_query: Optional[object] = None, + query_timeout: Optional[object] = None, + partition_option: Optional[Union[str, "OraclePartitionOption"]] = None, + partition_settings: Optional["OraclePartitionSettings"] = None, + **kwargs + ): + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OracleSource' # type: str + self.oracle_reader_query = oracle_reader_query + self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings class OracleTableDataset(Dataset): @@ -15161,6 +23614,157 @@ def __init__( self.orc_compression_codec = orc_compression_codec +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + **kwargs + ): + super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'OrcFormat' # type: str + + +class OrcSink(CopySink): + """A copy activity ORC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. 
+ :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, + **kwargs + ): + super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OrcSink' # type: str + self.store_settings = store_settings + + +class OrcSource(CopySource): + """A copy activity ORC source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, + **kwargs + ): + super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OrcSource' # type: str + self.store_settings = store_settings + + class ParameterSpecification(msrest.serialization.Model): """Definition of a single parameter for an entity. @@ -15179,92 +23783,243 @@ class ParameterSpecification(msrest.serialization.Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Union[str, "ParameterType"], + default_value: Optional[object] = None, + **kwargs + ): + super(ParameterSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the parquet storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". 
+ :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + compression_codec: Optional[Union[str, "ParquetCompressionCodec"]] = None, + **kwargs + ): + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Parquet' # type: str + self.location = location + self.compression_codec = compression_codec + + +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + **kwargs + ): + super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'ParquetFormat' # type: str + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. 
+ :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, *, - type: Union[str, "ParameterType"], - default_value: Optional[object] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(ParameterSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ParquetSink' # type: str + self.store_settings = store_settings -class ParquetDataset(Dataset): - """Parquet dataset. +class ParquetSource(CopySource): + """A copy activity Parquet source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the parquet storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". - :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - compression_codec: Optional[Union[str, "ParquetCompressionCodec"]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, 
schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'Parquet' # type: str - self.location = location - self.compression_codec = compression_codec + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ParquetSource' # type: str + self.store_settings = store_settings class PaypalLinkedService(LinkedService): @@ -15425,6 +24180,63 @@ def __init__( self.table_name = table_name +class PaypalSource(TabularSource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PaypalSource' # type: str + self.query = query + + class PhoenixLinkedService(LinkedService): """Phoenix server linked service. @@ -15629,6 +24441,63 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema +class PhoenixSource(TabularSource): + """A copy activity Phoenix server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PhoenixSource' # type: str + self.query = query + + class PipelineFolder(msrest.serialization.Model): """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. @@ -15720,18 +24589,20 @@ def __init__( self.name = name -class PipelineResource(SubResource): +class PipelineResource(AzureEntityResource): """Pipeline resource type. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. @@ -15961,6 +24832,53 @@ def __init__( self.continuation_token = continuation_token +class PolybaseSettings(msrest.serialization.Model): + """PolyBase settings. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param reject_type: Reject type. Possible values include: "value", "percentage". + :type reject_type: str or ~azure.synapse.artifacts.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that can be rejected before + the query fails. Type: number (or Expression with resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to retrieve before the + PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with + resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in delimited text files when + PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType + boolean). + :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + reject_type: Optional[Union[str, "PolybaseSettingsRejectType"]] = None, + reject_value: Optional[object] = None, + reject_sample_value: Optional[object] = None, + use_type_default: Optional[object] = None, + **kwargs + ): + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reject_type = reject_type + self.reject_value = reject_value + self.reject_sample_value = reject_sample_value + self.use_type_default = use_type_default + + class PostgreSqlLinkedService(LinkedService): """Linked service for PostgreSQL data source. @@ -16009,21 +24927,77 @@ class PostgreSqlLinkedService(LinkedService): def __init__( self, *, - connection_string: object, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'PostgreSql' # type: str + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + + +class PostgreSqlSource(TabularSource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'PostgreSql' # type: str - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PostgreSqlSource' # type: str + self.query = query class PostgreSqlTableDataset(Dataset): @@ -16320,6 +25294,211 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema +class PrestoSource(TabularSource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PrestoSource' # type: str + self.query = query + + +class PrivateEndpoint(msrest.serialization.Model): + """Private endpoint details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Resource id of the private endpoint. + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + + +class PrivateEndpointConnection(Resource): + """A private endpoint connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param private_endpoint: The private endpoint which the connection belongs to. + :type private_endpoint: ~azure.synapse.artifacts.models.PrivateEndpoint + :param private_link_service_connection_state: Connection state of the private endpoint + connection. + :type private_link_service_connection_state: + ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: Provisioning state of the private endpoint connection. 
+ :vartype provisioning_state: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + private_endpoint: Optional["PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = None + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """Connection state details of the private endpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param status: The private link service connection status. Possible values include: "Approved", + "Pending", "Rejected", "Disconnected". + :type status: str or ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionStateStatus + :param description: The private link service connection description. + :type description: str + :ivar actions_required: The actions required for private link service connection. + :vartype actions_required: str + """ + + _validation = { + 'actions_required': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[Union[str, "PrivateLinkServiceConnectionStateStatus"]] = None, + description: Optional[str] = None, + **kwargs + ): + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = None + + +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. 
@@ -16495,20 +25674,157 @@ class QuickBooksObjectDataset(Dataset): def __init__( self, *, - linked_service_name: "LinkedServiceReference", + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'QuickBooksObject' # type: str + self.table_name = table_name + + +class QuickBooksSource(TabularSource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'QuickBooksSource' # type: str + self.query = query + + +class RecurrenceSchedule(msrest.serialization.Model): + """The recurrence schedule. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. + :type monthly_occurrences: list[~azure.synapse.artifacts.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[str]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + minutes: Optional[List[int]] = None, + hours: Optional[List[int]] = None, + week_days: Optional[List[Union[str, "DayOfWeek"]]] = None, + month_days: Optional[List[int]] = None, + monthly_occurrences: Optional[List["RecurrenceScheduleOccurrence"]] = None, + **kwargs + ): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.minutes = minutes + self.hours = hours + self.week_days = week_days + self.month_days = month_days + self.monthly_occurrences = monthly_occurrences + + +class RecurrenceScheduleOccurrence(msrest.serialization.Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + "Wednesday", "Thursday", "Friday", "Saturday". + :type day: str or ~azure.synapse.artifacts.models.DayOfWeek + :param occurrence: The occurrence. 
+ :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'str'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__( + self, + *, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + day: Optional[Union[str, "DayOfWeek"]] = None, + occurrence: Optional[int] = None, **kwargs ): - super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'QuickBooksObject' # type: str - self.table_name = table_name + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.day = day + self.occurrence = occurrence class RedirectIncompatibleRowSettings(msrest.serialization.Model): @@ -16553,6 +25869,93 @@ def __init__( self.path = path +class RedshiftUnloadSettings(msrest.serialization.Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be + used for the unload operation when copying from the Amazon Redshift source. + :type s3_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store + the unloaded data from Amazon Redshift source. The bucket must be in the same region as the + Amazon Redshift source. Type: string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__( + self, + *, + s3_linked_service_name: "LinkedServiceReference", + bucket_name: object, + **kwargs + ): + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = s3_linked_service_name + self.bucket_name = bucket_name + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'RelationalSource' # type: str + self.query = query + + class RelationalTableDataset(Dataset): """The relational table dataset. @@ -16657,20 +26060,22 @@ def __init__( self.next_link = None -class RerunTriggerResource(SubResource): +class RerunTriggerResource(AzureEntityResource): """RerunTrigger resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of the rerun trigger. :type properties: ~azure.synapse.artifacts.models.RerunTumblingWindowTrigger @@ -16816,57 +26221,6 @@ def __init__( self.max_concurrency = max_concurrency -class Resource(msrest.serialization.Model): - """Azure Synapse top-level resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. 
- :vartype e_tag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - } - - def __init__( - self, - *, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = location - self.tags = tags - self.e_tag = None - - class ResponsysLinkedService(LinkedService): """Responsys linked service. @@ -17028,6 +26382,63 @@ def __init__( self.table_name = table_name +class ResponsysSource(TabularSource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ResponsysSource' # type: str + self.query = query + + class RestResourceDataset(Dataset): """A Rest service dataset. 
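As a quick illustration of how the regenerated msrest models added in the hunks above are meant to be used, the following is a minimal sketch. It assumes the azure-synapse-artifacts package built from this patch is installed and that RelationalSource and RetryPolicy are exported from azure.synapse.artifacts.models like the other generated models; the query text and retry values are invented for the example.

    # Illustrative sketch: constructing and serializing two of the regenerated models.
    # All literal values below are assumptions made up for the example.
    from azure.synapse.artifacts.models import RelationalSource, RetryPolicy

    # 'type' is the constant the model fills in itself ('RelationalSource').
    source = RelationalSource(
        query="SELECT * FROM dbo.Orders",   # hypothetical query
        source_retry_count=3,
        max_concurrent_connections=4,
    )

    # msrest models expose serialize()/as_dict(); serialize() produces the JSON-ready
    # payload keyed by the _attribute_map names (sourceRetryCount, maxConcurrentConnections, ...).
    payload = source.serialize()
    assert payload["type"] == "RelationalSource"

    # RetryPolicy declares a 30-86400 bound on intervalInSeconds in its _validation map.
    policy = RetryPolicy(count=2, interval_in_seconds=60)
    print(payload)
    print(policy.serialize())

Note that serialize() maps the snake_case constructor arguments back to the REST field names declared in each class's _attribute_map, which is why the payload carries sourceRetryCount rather than source_retry_count.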
@@ -17215,18 +26626,132 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'RestService' # type: str - self.url = url - self.enable_server_certificate_validation = enable_server_certificate_validation - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'RestService' # type: str + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next page request. 
+ :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + request_method: Optional[object] = None, + request_body: Optional[object] = None, + additional_headers: Optional[object] = None, + pagination_rules: Optional[object] = None, + http_request_timeout: Optional[object] = None, + request_interval: Optional[object] = None, + **kwargs + ): + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'RestSource' # type: str + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + + +class RetryPolicy(msrest.serialization.Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__( + self, + *, + count: Optional[object] = None, + interval_in_seconds: Optional[int] = None, + **kwargs + ): + super(RetryPolicy, self).__init__(**kwargs) + self.count = count + self.interval_in_seconds = interval_in_seconds class RunFilterParameters(msrest.serialization.Model): @@ -17596,6 +27121,63 @@ def __init__( self.table_name = table_name +class SalesforceMarketingCloudSource(TabularSource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SalesforceMarketingCloudSource' # type: str + self.query = query + + class SalesforceObjectDataset(Dataset): """The Salesforce object dataset. @@ -17802,20 +27384,295 @@ class SalesforceServiceCloudObjectDataset(Dataset): def __init__( self, *, - linked_service_name: "LinkedServiceReference", + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + object_api_name: Optional[object] = None, + **kwargs + ): + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SalesforceServiceCloudObject' # type: str + self.object_api_name = object_api_name + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + include: "Insert", "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, + external_id_field_name: Optional[object] = None, + ignore_null_values: Optional[object] = None, + **kwargs + ): + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SalesforceServiceCloudSink' # type: str + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + + +class 
SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, + **kwargs + ): + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SalesforceServiceCloudSource' # type: str + self.query = query + self.read_behavior = read_behavior + + +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + include: "Insert", "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, + external_id_field_name: Optional[object] = None, + ignore_null_values: Optional[object] = None, + **kwargs + ): + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SalesforceSink' # type: str + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + + +class SalesforceSource(TabularSource): + """A copy activity Salesforce source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__( + self, + *, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - object_api_name: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, **kwargs ): - super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SalesforceServiceCloudObject' # type: str - self.object_api_name = object_api_name + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SalesforceSource' # type: str + self.query = query + self.read_behavior = read_behavior class SapBwCubeDataset(Dataset): @@ -17967,6 +27824,62 @@ def __init__( self.encrypted_credential = encrypted_credential +class SapBwSource(TabularSource): + """A copy activity source for SapBW server via MDX. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: MDX query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapBwSource' # type: str + self.query = query + + class SapCloudForCustomerLinkedService(LinkedService): """Linked service for SAP Cloud for Customer. @@ -18110,6 +28023,126 @@ def __init__( self.path = path +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + values include: "Insert", "Update". + :type write_behavior: str or + ~azure.synapse.artifacts.models.SapCloudForCustomerSinkWriteBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None, + **kwargs + ): + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SapCloudForCustomerSink' # type: str + self.write_behavior = write_behavior + + +class SapCloudForCustomerSource(TabularSource): + """A copy activity source for SAP Cloud for Customer source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapCloudForCustomerSource' # type: str + self.query = query + + class SapEccLinkedService(LinkedService): """Linked service for SAP ERP Central Component(SAP ECC). @@ -18253,6 +28286,63 @@ def __init__( self.path = path +class SapEccSource(TabularSource): + """A copy activity source for SAP ECC source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapEccSource' # type: str + self.query = query + + class SapHanaLinkedService(LinkedService): """SAP HANA Linked Service. @@ -18327,14 +28417,110 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'SapHana' # type: str - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SapHana' # type: str + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + + +class SapHanaPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP HANA source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + **kwargs + ): + super(SapHanaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + + +class SapHanaSource(TabularSource): + """A copy activity source for SAP HANA source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression + with resultType integer). + :type packet_size: object + :param partition_option: The partition mechanism that will be used for SAP HANA read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.SapHanaPartitionOption + :param partition_settings: The settings that will be leveraged for SAP HANA source + partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SapHanaPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + packet_size: Optional[object] = None, + partition_option: Optional[Union[str, "SapHanaPartitionOption"]] = None, + partition_settings: Optional["SapHanaPartitionSettings"] = None, + **kwargs + ): + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapHanaSource' # type: str + self.query = query + self.packet_size = packet_size + self.partition_option = partition_option + self.partition_settings = partition_settings class SapHanaTableDataset(Dataset): @@ -18506,6 +28692,70 @@ def __init__( self.encrypted_credential = encrypted_credential +class SapOpenHubSource(TabularSource): + """A copy activity source for SAP Business Warehouse Open Hub Destination source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + exclude_last_request: Optional[object] = None, + base_request_id: Optional[object] = None, + **kwargs + ): + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapOpenHubSource' # type: str + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + + class SapOpenHubTableDataset(Dataset): """Sap Business Warehouse Open Hub Destination Table properties. @@ -18734,6 +28984,48 @@ def __init__( self.encrypted_credential = encrypted_credential +class SapTablePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). 
+ :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table will be split into. + Type: integer (or Expression with resultType string). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + max_partitions_number: Optional[object] = None, + **kwargs + ): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number + + class SapTableResourceDataset(Dataset): """SAP Table Resource properties. @@ -18804,6 +29096,215 @@ def __init__( self.table_name = table_name +class SapTableSource(TabularSource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param row_count: The number of rows to be retrieved. Type: integer(or Expression with + resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with + resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, + column0, column1. Type: string (or Expression with resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 + EQ SOME VALUE. Type: string (or Expression with resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when + retrieving data from SAP Table. Type: integer (or Expression with resultType integer). 
+ :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP table read in + parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", + "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". + :type partition_option: str or ~azure.synapse.artifacts.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP table source + partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + row_count: Optional[object] = None, + row_skips: Optional[object] = None, + rfc_table_fields: Optional[object] = None, + rfc_table_options: Optional[object] = None, + batch_size: Optional[object] = None, + custom_rfc_read_table_function_module: Optional[object] = None, + partition_option: Optional[Union[str, "SapTablePartitionOption"]] = None, + partition_settings: Optional["SapTablePartitionSettings"] = None, + **kwargs + ): + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapTableSource' # type: str + self.row_count = row_count + self.row_skips = row_skips + self.rfc_table_fields = rfc_table_fields + self.rfc_table_options = rfc_table_options + self.batch_size = batch_size + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option + self.partition_settings = partition_settings + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.synapse.artifacts.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__( + self, + *, + recurrence: "ScheduleTriggerRecurrence", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + **kwargs + ): + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'ScheduleTrigger' # type: str + self.recurrence = recurrence + + +class ScheduleTriggerRecurrence(msrest.serialization.Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :type frequency: str or ~azure.synapse.artifacts.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: ~datetime.datetime + :param end_time: The end time. + :type end_time: ~datetime.datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. 
+ :type schedule: ~azure.synapse.artifacts.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + frequency: Optional[Union[str, "RecurrenceFrequency"]] = None, + interval: Optional[int] = None, + start_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + time_zone: Optional[str] = None, + schedule: Optional["RecurrenceSchedule"] = None, + **kwargs + ): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.time_zone = time_zone + self.schedule = schedule + + class ScriptAction(msrest.serialization.Model): """Custom script action to run on HDI ondemand cluster once it's up. @@ -18842,43 +29343,124 @@ def __init__( parameters: Optional[str] = None, **kwargs ): - super(ScriptAction, self).__init__(**kwargs) - self.name = name - self.uri = uri - self.roles = roles - self.parameters = parameters + super(ScriptAction, self).__init__(**kwargs) + self.name = name + self.uri = uri + self.roles = roles + self.parameters = parameters + + +class SecureString(SecretBase): + """Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + value: str, + **kwargs + ): + super(SecureString, self).__init__(**kwargs) + self.type = 'SecureString' # type: str + self.value = value + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__( + self, + *, + offset: str, + size: Optional[str] = None, + **kwargs + ): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str + self.offset = offset + self.size = size -class SecureString(SecretBase): - """Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :param linked_info: The base definition of a linked integration runtime. + :type linked_info: ~azure.synapse.artifacts.models.LinkedIntegrationRuntimeType """ _validation = { 'type': {'required': True}, - 'value': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, } def __init__( self, *, - value: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + linked_info: Optional["LinkedIntegrationRuntimeType"] = None, **kwargs ): - super(SecureString, self).__init__(**kwargs) - self.type = 'SecureString' # type: str - self.value = value + super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.type = 'SelfHosted' # type: str + self.linked_info = linked_info class ServiceNowLinkedService(LinkedService): @@ -19058,6 +29640,63 @@ def __init__( self.table_name = table_name +class ServiceNowSource(TabularSource): + """A copy activity ServiceNow server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ServiceNowSource' # type: str + self.query = query + + class SetVariableActivity(Activity): """Set value for a Variable. @@ -19116,6 +29755,113 @@ def __init__( self.value = value +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(SftpLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'SftpLocation' # type: str + + +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. 
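+
+    A minimal construction sketch (illustrative wildcard only; assumes the ``SftpReadSettings``
+    constructor defined below and that the models package re-exports it)::
+
+        from azure.synapse.artifacts.models import SftpReadSettings
+
+        # Read CSV files recursively under the dataset folder path.
+        read_settings = SftpReadSettings(recursive=True, wildcard_file_name="*.csv")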
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SftpReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + class SftpServerLinkedService(LinkedService): """A linked service for an SSH File Transfer Protocol (SFTP) server. @@ -19234,6 +29980,52 @@ def __init__( self.host_key_fingerprint = host_key_fingerprint +class SftpWriteSettings(StoreWriteSettings): + """Sftp write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default + value: 01:00:00 (one hour). Type: string (or Expression with resultType string). + :type operation_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + operation_timeout: Optional[object] = None, + **kwargs + ): + super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'SftpWriteSettings' # type: str + self.operation_timeout = operation_timeout + + class ShopifyLinkedService(LinkedService): """Shopify Service linked service. @@ -19350,41 +30142,130 @@ class ShopifyObjectDataset(Dataset): :type table_name: object """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ShopifyObject' # type: str + self.table_name = table_name + + +class ShopifySource(TabularSource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ShopifySource' # type: str + self.query = query + + +class Sku(msrest.serialization.Model): + """SQL pool SKU. + + :param tier: The service tier. + :type tier: str + :param name: The SKU name. + :type name: str + :param capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. 
+ :type capacity: int + """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + tier: Optional[str] = None, + name: Optional[str] = None, + capacity: Optional[int] = None, **kwargs ): - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ShopifyObject' # type: str - self.table_name = table_name + super(Sku, self).__init__(**kwargs) + self.tier = tier + self.name = name + self.capacity = capacity class SparkBatchJob(msrest.serialization.Model): @@ -19615,20 +30496,22 @@ def __init__( self.job_properties = job_properties -class SparkJobDefinitionResource(SubResource): +class SparkJobDefinitionResource(AzureEntityResource): """Spark job definition resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of spark job definition. 
:type properties: ~azure.synapse.artifacts.models.SparkJobDefinition @@ -20107,147 +30990,655 @@ class SparkScheduler(msrest.serialization.Model): """ _attribute_map = { - 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, - 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, - 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, - 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, + 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, + 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, + 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + *, + submitted_at: Optional[datetime.datetime] = None, + scheduled_at: Optional[datetime.datetime] = None, + ended_at: Optional[datetime.datetime] = None, + cancellation_requested_at: Optional[datetime.datetime] = None, + current_state: Optional[Union[str, "SchedulerCurrentState"]] = None, + **kwargs + ): + super(SparkScheduler, self).__init__(**kwargs) + self.submitted_at = submitted_at + self.scheduled_at = scheduled_at + self.ended_at = ended_at + self.cancellation_requested_at = cancellation_requested_at + self.current_state = current_state + + +class SparkServiceError(msrest.serialization.Model): + """SparkServiceError. + + :param message: + :type message: str + :param error_code: + :type error_code: str + :param source: Possible values include: "System", "User", "Unknown", "Dependency". + :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource + """ + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'source': {'key': 'source', 'type': 'str'}, + } + + def __init__( + self, + *, + message: Optional[str] = None, + error_code: Optional[str] = None, + source: Optional[Union[str, "SparkErrorSource"]] = None, + **kwargs + ): + super(SparkServiceError, self).__init__(**kwargs) + self.message = message + self.error_code = error_code + self.source = source + + +class SparkServicePlugin(msrest.serialization.Model): + """SparkServicePlugin. + + :param preparation_started_at: + :type preparation_started_at: ~datetime.datetime + :param resource_acquisition_started_at: + :type resource_acquisition_started_at: ~datetime.datetime + :param submission_started_at: + :type submission_started_at: ~datetime.datetime + :param monitoring_started_at: + :type monitoring_started_at: ~datetime.datetime + :param cleanup_started_at: + :type cleanup_started_at: ~datetime.datetime + :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", + "Submission", "Monitoring", "Cleanup", "Ended". 
+ :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState + """ + + _attribute_map = { + 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, + 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, + 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, + 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, + 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + *, + preparation_started_at: Optional[datetime.datetime] = None, + resource_acquisition_started_at: Optional[datetime.datetime] = None, + submission_started_at: Optional[datetime.datetime] = None, + monitoring_started_at: Optional[datetime.datetime] = None, + cleanup_started_at: Optional[datetime.datetime] = None, + current_state: Optional[Union[str, "PluginCurrentState"]] = None, + **kwargs + ): + super(SparkServicePlugin, self).__init__(**kwargs) + self.preparation_started_at = preparation_started_at + self.resource_acquisition_started_at = resource_acquisition_started_at + self.submission_started_at = submission_started_at + self.monitoring_started_at = monitoring_started_at + self.cleanup_started_at = cleanup_started_at + self.current_state = current_state + + +class SparkSource(TabularSource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SparkSource' # type: str + self.query = query + + +class SqlConnection(msrest.serialization.Model): + """The connection used to execute the SQL script. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", + "SqlPool". + :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType + :param name: Required. The identifier of the connection. + :type name: str + """ + + _validation = { + 'type': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "SqlConnectionType"], + name: str, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(SqlConnection, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.name = name + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + applicable. Type: boolean (or Expression with resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :type poly_base_settings: ~azure.synapse.artifacts.models.PolybaseSettings + :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. + Type: boolean (or Expression with resultType boolean). + :type allow_copy_command: object + :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is + true. + :type copy_command_settings: ~azure.synapse.artifacts.models.DWCopyCommandSettings + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - submitted_at: Optional[datetime.datetime] = None, - scheduled_at: Optional[datetime.datetime] = None, - ended_at: Optional[datetime.datetime] = None, - cancellation_requested_at: Optional[datetime.datetime] = None, - current_state: Optional[Union[str, "SchedulerCurrentState"]] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + allow_poly_base: Optional[object] = None, + poly_base_settings: Optional["PolybaseSettings"] = None, + allow_copy_command: Optional[object] = None, + copy_command_settings: Optional["DWCopyCommandSettings"] = None, + table_option: Optional[object] = None, **kwargs ): - super(SparkScheduler, self).__init__(**kwargs) - self.submitted_at = submitted_at - self.scheduled_at = scheduled_at - self.ended_at = ended_at - self.cancellation_requested_at = cancellation_requested_at - self.current_state = current_state + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, 
max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlDWSink' # type: str + self.pre_copy_script = pre_copy_script + self.allow_poly_base = allow_poly_base + self.poly_base_settings = poly_base_settings + self.allow_copy_command = allow_copy_command + self.copy_command_settings = copy_command_settings + self.table_option = table_option -class SparkServiceError(msrest.serialization.Model): - """SparkServiceError. +class SqlDWSource(TabularSource): + """A copy activity SQL Data Warehouse source. - :param message: - :type message: str - :param error_code: - :type error_code: str - :param source: Possible values include: "System", "User", "Unknown", "Dependency". - :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with + resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType + object), itemType: StoredProcedureParameter. 
+ :type stored_procedure_parameters: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'message': {'key': 'message', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } def __init__( self, *, - message: Optional[str] = None, - error_code: Optional[str] = None, - source: Optional[Union[str, "SparkErrorSource"]] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[object] = None, **kwargs ): - super(SparkServiceError, self).__init__(**kwargs) - self.message = message - self.error_code = error_code - self.source = source + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlDWSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters -class SparkServicePlugin(msrest.serialization.Model): - """SparkServicePlugin. +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. - :param preparation_started_at: - :type preparation_started_at: ~datetime.datetime - :param resource_acquisition_started_at: - :type resource_acquisition_started_at: ~datetime.datetime - :param submission_started_at: - :type submission_started_at: ~datetime.datetime - :param monitoring_started_at: - :type monitoring_started_at: ~datetime.datetime - :param cleanup_started_at: - :type cleanup_started_at: ~datetime.datetime - :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", - "Submission", "Monitoring", "Cleanup", "Ended". - :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, - 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, - 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, - 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, - 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - preparation_started_at: Optional[datetime.datetime] = None, - resource_acquisition_started_at: Optional[datetime.datetime] = None, - submission_started_at: Optional[datetime.datetime] = None, - monitoring_started_at: Optional[datetime.datetime] = None, - cleanup_started_at: Optional[datetime.datetime] = None, - current_state: Optional[Union[str, 
"PluginCurrentState"]] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, **kwargs ): - super(SparkServicePlugin, self).__init__(**kwargs) - self.preparation_started_at = preparation_started_at - self.resource_acquisition_started_at = resource_acquisition_started_at - self.submission_started_at = submission_started_at - self.monitoring_started_at = monitoring_started_at - self.cleanup_started_at = cleanup_started_at - self.current_state = current_state + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlMISink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option -class SqlConnection(msrest.serialization.Model): - """The connection used to execute the SQL script. +class SqlMISource(TabularSource): + """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", - "SqlPool". - :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType - :param name: Required. The identifier of the connection. - :type name: str + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed + Instance source. This cannot be used at the same time as SqlReaderQuery. 
Type: string (or + Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object """ _validation = { 'type': {'required': True}, - 'name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, *, - type: Union[str, "SqlConnectionType"], - name: str, additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + produce_additional_types: Optional[object] = None, **kwargs ): - super(SqlConnection, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.name = name + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlMISource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + + +class SqlPool(TrackedResource): + """A SQL Analytics pool. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param sku: SQL pool SKU. 
+ :type sku: ~azure.synapse.artifacts.models.Sku + :param max_size_bytes: Maximum size in bytes. + :type max_size_bytes: long + :param collation: Collation mode. + :type collation: str + :param source_database_id: Source database to create from. + :type source_database_id: str + :param recoverable_database_id: Backup database to restore from. + :type recoverable_database_id: str + :param provisioning_state: Resource state. + :type provisioning_state: str + :param status: Resource status. + :type status: str + :param restore_point_in_time: Snapshot time to restore. + :type restore_point_in_time: ~datetime.datetime + :param create_mode: What is this?. + :type create_mode: str + :param creation_date: Date the SQL pool was created. + :type creation_date: ~datetime.datetime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'max_size_bytes': {'key': 'properties.maxSizeBytes', 'type': 'long'}, + 'collation': {'key': 'properties.collation', 'type': 'str'}, + 'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'}, + 'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'}, + 'create_mode': {'key': 'properties.createMode', 'type': 'str'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + sku: Optional["Sku"] = None, + max_size_bytes: Optional[int] = None, + collation: Optional[str] = None, + source_database_id: Optional[str] = None, + recoverable_database_id: Optional[str] = None, + provisioning_state: Optional[str] = None, + status: Optional[str] = None, + restore_point_in_time: Optional[datetime.datetime] = None, + create_mode: Optional[str] = None, + creation_date: Optional[datetime.datetime] = None, + **kwargs + ): + super(SqlPool, self).__init__(tags=tags, location=location, **kwargs) + self.sku = sku + self.max_size_bytes = max_size_bytes + self.collation = collation + self.source_database_id = source_database_id + self.recoverable_database_id = recoverable_database_id + self.provisioning_state = provisioning_state + self.status = status + self.restore_point_in_time = restore_point_in_time + self.create_mode = create_mode + self.creation_date = creation_date + + +class SqlPoolInfoListResult(msrest.serialization.Model): + """List of SQL pools. + + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of SQL pools. 
+ :type value: list[~azure.synapse.artifacts.models.SqlPool] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[SqlPool]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["SqlPool"]] = None, + **kwargs + ): + super(SqlPoolInfoListResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value class SqlPoolReference(msrest.serialization.Model): @@ -20465,20 +31856,22 @@ def __init__( self.language = language -class SqlScriptResource(SubResource): +class SqlScriptResource(AzureEntityResource): """Sql Script resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of sql script. :type properties: ~azure.synapse.artifacts.models.SqlScript @@ -20614,6 +32007,173 @@ def __init__( self.encrypted_credential = encrypted_credential +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. 
+ :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, + **kwargs + ): + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlServerSink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option + + +class SqlServerSource(TabularSource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + produce_additional_types: Optional[object] = None, + **kwargs + ): + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlServerSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + + class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. 
@@ -20655,114 +32215,276 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, - 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__( + self, + *, + name: str, + stored_procedure_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + **kwargs + ): + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'SqlServerStoredProcedure' # type: str + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + + +class SqlServerTableDataset(Dataset): + """The on-premises SQL Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string + (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the SQL Server dataset. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, + **kwargs + ): + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SqlServerTable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table + + +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - name: str, - stored_procedure_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, **kwargs ): - super(SqlServerStoredProcedureActivity, 
self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'SqlServerStoredProcedure' # type: str - self.stored_procedure_name = stored_procedure_name + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlSink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option -class SqlServerTableDataset(Dataset): - """The on-premises SQL Server dataset. +class SqlSource(TabularSource): + """A copy activity SQL source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the SQL Server dataset. Type: string - (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the SQL Server dataset. Type: string (or Expression with - resultType string). - :type table: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, **kwargs ): - super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'SqlServerTable' # type: str - 
self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters class SquareLinkedService(LinkedService): @@ -20930,6 +32652,63 @@ def __init__( self.table_name = table_name +class SquareSource(TabularSource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SquareSource' # type: str + self.query = query + + class SSISAccessCredential(msrest.serialization.Model): """SSIS access credential. @@ -21407,6 +33186,44 @@ def __init__( self.type = type +class SubResource(AzureEntityResource): + """Azure Synapse nested resource, which belongs to a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + + class SwitchActivity(Activity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. @@ -21591,6 +33408,62 @@ def __init__( self.encrypted_credential = encrypted_credential +class SybaseSource(TabularSource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SybaseSource' # type: str + self.query = query + + class SybaseTableDataset(Dataset): """The Sybase table dataset. @@ -21825,19 +33698,86 @@ class SynapseSparkJobReference(msrest.serialization.Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "SparkJobReferenceType"], + reference_name: str, + **kwargs + ): + super(SynapseSparkJobReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy translator type.Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + MyName" Type: string (or Expression with resultType string). This property will be retired. + Please use mappings property. + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. + Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": + "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will + be retired. Please use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. + Type: object (or Expression with resultType object). + :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and object) values to simple + strings in json format. Type: boolean (or Expression with resultType boolean). + :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. Tabular->tabular example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. 
+ Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, } def __init__( self, *, - type: Union[str, "SparkJobReferenceType"], - reference_name: str, + additional_properties: Optional[Dict[str, object]] = None, + column_mappings: Optional[object] = None, + schema_mapping: Optional[object] = None, + collection_reference: Optional[object] = None, + map_complex_values_to_string: Optional[object] = None, + mappings: Optional[object] = None, **kwargs ): - super(SynapseSparkJobReference, self).__init__(**kwargs) - self.type = type - self.reference_name = reference_name + super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TabularTranslator' # type: str + self.column_mappings = column_mappings + self.schema_mapping = schema_mapping + self.collection_reference = collection_reference + self.map_complex_values_to_string = map_complex_values_to_string + self.mappings = mappings class TeradataLinkedService(LinkedService): @@ -21922,6 +33862,110 @@ def __init__( self.encrypted_credential = encrypted_credential +class TeradataPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range or + hash partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + +class TeradataSource(TabularSource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Teradata query. Type: string (or Expression with resultType string). + :type query: object + :param partition_option: The partition mechanism that will be used for teradata read in + parallel. Possible values include: "None", "Hash", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for teradata source + partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + partition_option: Optional[Union[str, "TeradataPartitionOption"]] = None, + partition_settings: Optional["TeradataPartitionSettings"] = None, + **kwargs + ): + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'TeradataSource' # type: str + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + + class TeradataTableDataset(Dataset): """The Teradata database dataset. @@ -21996,6 +34040,99 @@ def __init__( self.table = table +class TextFormat(DatasetStorageFormat): + """The data stored in text format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :type row_delimiter: object + :param escape_char: The escape character. Type: string (or Expression with resultType string). + :type escape_char: object + :param quote_char: The quote character. Type: string (or Expression with resultType string). + :type quote_char: object + :param null_value: The null value string. Type: string (or Expression with resultType string). + :type null_value: object + :param encoding_name: The code page name of the preferred encoding. If missing, the default value + is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of + the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param treat_empty_as_null: Treat empty column values in the text file as null. The default + value is true. Type: boolean (or Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The + default value is 0. Type: integer (or Expression with resultType integer). + :type skip_line_count: object + :param first_row_as_header: When used as input, treat the first row of data as headers. When + used as output, write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). 
+ :type first_row_as_header: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, + 'escape_char': {'key': 'escapeChar', 'type': 'object'}, + 'quote_char': {'key': 'quoteChar', 'type': 'object'}, + 'null_value': {'key': 'nullValue', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + column_delimiter: Optional[object] = None, + row_delimiter: Optional[object] = None, + escape_char: Optional[object] = None, + quote_char: Optional[object] = None, + null_value: Optional[object] = None, + encoding_name: Optional[object] = None, + treat_empty_as_null: Optional[object] = None, + skip_line_count: Optional[object] = None, + first_row_as_header: Optional[object] = None, + **kwargs + ): + super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'TextFormat' # type: str + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.escape_char = escape_char + self.quote_char = quote_char + self.null_value = null_value + self.encoding_name = encoding_name + self.treat_empty_as_null = treat_empty_as_null + self.skip_line_count = skip_line_count + self.first_row_as_header = first_row_as_header + + class TriggerDependencyProvisioningStatus(msrest.serialization.Model): """Defines the response of a provision trigger dependency operation. @@ -22029,6 +34166,45 @@ def __init__( self.provisioning_status = provisioning_status +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__( + self, + *, + reference_trigger: "TriggerReference", + **kwargs + ): + super(TriggerDependencyReference, self).__init__(**kwargs) + self.type = 'TriggerDependencyReference' # type: str + self.reference_trigger = reference_trigger + + class TriggerListResponse(msrest.serialization.Model): """A list of trigger resources. 
@@ -22087,20 +34263,55 @@ def __init__( self.parameters = parameters -class TriggerResource(SubResource): +class TriggerReference(msrest.serialization.Model): + """Trigger reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Trigger reference type. Possible values include: "TriggerReference". + :type type: str or ~azure.synapse.artifacts.models.TriggerReferenceType + :param reference_name: Required. Reference trigger name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "TriggerReferenceType"], + reference_name: str, + **kwargs + ): + super(TriggerReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class TriggerResource(AzureEntityResource): """Trigger resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of the trigger. :type properties: ~azure.synapse.artifacts.models.Trigger @@ -22217,51 +34428,201 @@ class TriggerRunsQueryResponse(msrest.serialization.Model): } _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["TriggerRun"], + continuation_token: Optional[str] = None, + **kwargs + ): + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token + + +class TriggerSubscriptionOperationStatus(msrest.serialization.Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", + "Deprovisioning", "Disabled", "Unknown". 
+ :vartype status: str or ~azure.synapse.artifacts.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour". + :type frequency: str or ~azure.synapse.artifacts.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: The end time for the time period for the trigger during which events are fired + for windows that are ready. Only UTC time is currently supported. + :type end_time: ~datetime.datetime + :param delay: Specifies how long the trigger waits past due time before triggering new run. It + doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline runs. + :type retry_policy: ~azure.synapse.artifacts.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + supported. 
+ :type depends_on: list[~azure.synapse.artifacts.models.DependencyReference] + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } def __init__( self, *, - value: List["TriggerRun"], - continuation_token: Optional[str] = None, + pipeline: "TriggerPipelineReference", + frequency: Union[str, "TumblingWindowFrequency"], + interval: int, + start_time: datetime.datetime, + max_concurrency: int, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + end_time: Optional[datetime.datetime] = None, + delay: Optional[object] = None, + retry_policy: Optional["RetryPolicy"] = None, + depends_on: Optional[List["DependencyReference"]] = None, **kwargs ): - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'TumblingWindowTrigger' # type: str + self.pipeline = pipeline + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.delay = delay + self.max_concurrency = max_concurrency + self.retry_policy = retry_policy + self.depends_on = depends_on -class TriggerSubscriptionOperationStatus(msrest.serialization.Model): - """Defines the response of a trigger subscription operation. +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", - "Deprovisioning", "Disabled", "Unknown". - :vartype status: str or ~azure.synapse.artifacts.models.EventSubscriptionStatus + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. 
+ :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. + :type size: str """ _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, } def __init__( self, + *, + reference_trigger: "TriggerReference", + offset: Optional[str] = None, + size: Optional[str] = None, **kwargs ): - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) - self.trigger_name = None - self.status = None + super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) + self.type = 'TumblingWindowTriggerDependencyReference' # type: str + self.offset = offset + self.size = size class UntilActivity(Activity): @@ -22547,6 +34908,63 @@ def __init__( self.encrypted_credential = encrypted_credential +class VerticaSource(TabularSource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'VerticaSource' # type: str + self.query = query + + class VerticaTableDataset(Dataset): """Vertica dataset. @@ -22628,6 +35046,27 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema +class VirtualNetworkProfile(msrest.serialization.Model): + """Virtual Network Profile. + + :param compute_subnet_id: Subnet ID used for computes in workspace. + :type compute_subnet_id: str + """ + + _attribute_map = { + 'compute_subnet_id': {'key': 'computeSubnetId', 'type': 'str'}, + } + + def __init__( + self, + *, + compute_subnet_id: Optional[str] = None, + **kwargs + ): + super(VirtualNetworkProfile, self).__init__(**kwargs) + self.compute_subnet_id = compute_subnet_id + + class WaitActivity(Activity): """This activity suspends pipeline execution for the specified interval. @@ -23149,6 +35588,52 @@ def __init__( self.type_properties = type_properties +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + **kwargs + ): + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'WebSource' # type: str + + class WebTableDataset(Dataset): """The dataset points to a HTML table in the web page. @@ -23225,85 +35710,110 @@ def __init__( self.path = path -class Workspace(Resource): - """Workspace resource type. +class Workspace(TrackedResource): + """A workspace. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. + :param tags: A set of tags. Resource tags. :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the workspace. - :type identity: ~azure.synapse.artifacts.models.WorkspaceIdentity - :ivar provisioning_state: Workspace provisioning state, example Succeeded. + :param location: Required. The geo-location where the resource lives. + :type location: str + :param identity: Identity of the workspace. + :type identity: ~azure.synapse.artifacts.models.ManagedIdentity + :param default_data_lake_storage: Workspace default data lake storage account details. + :type default_data_lake_storage: ~azure.synapse.artifacts.models.DataLakeStorageAccountDetails + :param sql_administrator_login_password: SQL administrator login password. + :type sql_administrator_login_password: str + :param managed_resource_group_name: Workspace managed resource group. The resource group name + uniquely identifies the resource group within the user subscriptionId. The resource group name + must be no longer than 90 characters long, and must be alphanumeric characters + (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and'.'. Note that the name cannot end with '.'. + :type managed_resource_group_name: str + :ivar provisioning_state: Resource provisioning state. 
:vartype provisioning_state: str - :ivar create_time: Time the workspace was created in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar version: Version of the workspace. - :vartype version: str - :param default_storage: Linked service reference. - :type default_storage: ~azure.synapse.artifacts.models.LinkedServiceReference - :param default_sql_server: Linked service reference. - :type default_sql_server: ~azure.synapse.artifacts.models.LinkedServiceReference + :param sql_administrator_login: Login for workspace SQL active directory administrator. + :type sql_administrator_login: str + :param virtual_network_profile: Virtual Network profile. + :type virtual_network_profile: ~azure.synapse.artifacts.models.VirtualNetworkProfile + :param connectivity_endpoints: Connectivity endpoints. + :type connectivity_endpoints: dict[str, str] + :param managed_virtual_network: Setting this to 'default' will ensure that all compute for this + workspace is in a virtual network managed on behalf of the user. + :type managed_virtual_network: str + :param private_endpoint_connections: Private endpoint connections to the workspace. + :type private_endpoint_connections: + list[~azure.synapse.artifacts.models.PrivateEndpointConnection] + :ivar extra_properties: Workspace level configs and feature flags. + :vartype extra_properties: dict[str, object] """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'location': {'required': True}, 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, + 'extra_properties': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'WorkspaceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, + 'default_data_lake_storage': {'key': 'properties.defaultDataLakeStorage', 'type': 'DataLakeStorageAccountDetails'}, + 'sql_administrator_login_password': {'key': 'properties.sqlAdministratorLoginPassword', 'type': 'str'}, + 'managed_resource_group_name': {'key': 'properties.managedResourceGroupName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'default_storage': {'key': 'properties.defaultStorage', 'type': 'LinkedServiceReference'}, - 'default_sql_server': {'key': 'properties.defaultSqlServer', 'type': 'LinkedServiceReference'}, + 'sql_administrator_login': {'key': 'properties.sqlAdministratorLogin', 'type': 'str'}, + 'virtual_network_profile': {'key': 'properties.virtualNetworkProfile', 'type': 'VirtualNetworkProfile'}, + 'connectivity_endpoints': {'key': 'properties.connectivityEndpoints', 'type': '{str}'}, + 'managed_virtual_network': {'key': 'properties.managedVirtualNetwork', 'type': 'str'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'extra_properties': {'key': 'properties.extraProperties', 'type': '{object}'}, } def __init__( self, *, - location: Optional[str] = None, 
+ location: str, tags: Optional[Dict[str, str]] = None, - additional_properties: Optional[Dict[str, object]] = None, - identity: Optional["WorkspaceIdentity"] = None, - default_storage: Optional["LinkedServiceReference"] = None, - default_sql_server: Optional["LinkedServiceReference"] = None, - **kwargs - ): - super(Workspace, self).__init__(location=location, tags=tags, **kwargs) - self.additional_properties = additional_properties + identity: Optional["ManagedIdentity"] = None, + default_data_lake_storage: Optional["DataLakeStorageAccountDetails"] = None, + sql_administrator_login_password: Optional[str] = None, + managed_resource_group_name: Optional[str] = None, + sql_administrator_login: Optional[str] = None, + virtual_network_profile: Optional["VirtualNetworkProfile"] = None, + connectivity_endpoints: Optional[Dict[str, str]] = None, + managed_virtual_network: Optional[str] = None, + private_endpoint_connections: Optional[List["PrivateEndpointConnection"]] = None, + **kwargs + ): + super(Workspace, self).__init__(tags=tags, location=location, **kwargs) self.identity = identity + self.default_data_lake_storage = default_data_lake_storage + self.sql_administrator_login_password = sql_administrator_login_password + self.managed_resource_group_name = managed_resource_group_name self.provisioning_state = None - self.create_time = None - self.version = None - self.default_storage = default_storage - self.default_sql_server = default_sql_server + self.sql_administrator_login = sql_administrator_login + self.virtual_network_profile = virtual_network_profile + self.connectivity_endpoints = connectivity_endpoints + self.managed_virtual_network = managed_virtual_network + self.private_endpoint_connections = private_endpoint_connections + self.extra_properties = None class WorkspaceIdentity(msrest.serialization.Model): @@ -23530,6 +36040,63 @@ def __init__( self.table_name = table_name +class XeroSource(TabularSource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
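The reworked Workspace model above flattens the REST payload's properties bag onto the Python object (the 'properties.*' keys in its attribute map). A small sketch, not part of this patch, of what that mapping looks like through msrest's generic Model.deserialize, with made-up values:

from azure.synapse.artifacts import models

payload = {
    "id": "/subscriptions/000/resourceGroups/rg/providers/Microsoft.Synapse/workspaces/ws",
    "name": "ws",
    "type": "Microsoft.Synapse/workspaces",
    "location": "eastus2",
    "properties": {
        "sqlAdministratorLogin": "sqladmin",
        "managedVirtualNetwork": "default",
        "provisioningState": "Succeeded",
    },
}

ws = models.Workspace.deserialize(payload)
print(ws.sql_administrator_login)  # "sqladmin", lifted out of the nested properties object
print(ws.managed_virtual_network)  # "default"
print(ws.provisioning_state)       # "Succeeded"; read-only, ignored when sending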
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'XeroSource' # type: str + self.query = query + + class ZohoLinkedService(LinkedService): """Zoho server linked service. @@ -23680,3 +36247,60 @@ def __init__( super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'ZohoObject' # type: str self.table_name = table_name + + +class ZohoSource(TabularSource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ZohoSource' # type: str + self.query = query diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py index b1056111cbe8..f5afded7d4f5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py @@ -17,6 +17,10 @@ from ._sql_script_operations import SqlScriptOperations from ._spark_job_definition_operations import SparkJobDefinitionOperations from ._notebook_operations import NotebookOperations +from ._workspace_operations import WorkspaceOperations +from ._sql_pools_operations import SqlPoolsOperations +from ._big_data_pools_operations import BigDataPoolsOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations __all__ = [ 'LinkedServiceOperations', @@ -30,4 +34,8 @@ 'SqlScriptOperations', 'SparkJobDefinitionOperations', 'NotebookOperations', + 'WorkspaceOperations', + 'SqlPoolsOperations', + 'BigDataPoolsOperations', + 'IntegrationRuntimesOperations', ] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py new file mode 100644 index 000000000000..9c95da14db58 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BigDataPoolsOperations(object): + """BigDataPoolsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.BigDataPoolResourceInfoListResult" + """List Big Data Pools. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/bigDataPools'} # type: ignore + + def get( + self, + big_data_pool_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.BigDataPoolResourceInfo" + """Get Big Data Pool. + + :param big_data_pool_name: The Big Data Pool name. 
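A rough usage sketch, not part of this patch, for the new Big Data Pools operation group; the client attribute name (big_data_pools), the workspace endpoint and the .value attribute on the list result are assumptions, and azure-identity is used purely for illustration.

from azure.identity import DefaultAzureCredential
from azure.synapse.artifacts import ArtifactsClient

client = ArtifactsClient(
    credential=DefaultAzureCredential(),
    endpoint="https://<workspace-name>.dev.azuresynapse.net",  # hypothetical workspace endpoint
)

pools = client.big_data_pools.list()        # BigDataPoolResourceInfoListResult
for pool in pools.value:                    # assumed list payload attribute
    print(pool.name)

spark_pool = client.big_data_pools.get("mysparkpool")  # big_data_pool_name, as documented above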
+ :type big_data_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfo, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfo + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfo"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'bigDataPoolName': self._serialize.url("big_data_pool_name", big_data_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/bigDataPools/{bigDataPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py index a185a33b87b5..7a914211996f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -53,7 +53,9 @@ def _create_data_flow_debug_session_initial( ): # type: (...) 
-> Optional["models.CreateDataFlowDebugSessionResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -172,7 +174,9 @@ def query_data_flow_debug_sessions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -245,7 +249,9 @@ def add_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -303,7 +309,9 @@ def delete_data_flow_debug_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -349,7 +357,9 @@ def _execute_command_initial( ): # type: (...) -> Optional["models.DataFlowDebugCommandResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py index 83d0ef3b07f9..6e4c9941c4a8 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -55,7 +55,9 @@ def _create_or_update_data_flow_initial( ): # type: (...) 
-> Optional["models.DataFlowResource"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _data_flow = models.DataFlowResource(properties=properties) @@ -191,7 +193,9 @@ def get_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -238,7 +242,9 @@ def _delete_data_flow_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -341,7 +347,9 @@ def get_data_flows_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py index 47cab4d4fa17..55d5e21411a8 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,7 +59,9 @@ def get_datasets_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -125,7 +127,9 @@ def _create_or_update_dataset_initial( ): # type: (...) 
-> Optional["models.DatasetResource"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _dataset = models.DatasetResource(properties=properties) @@ -261,7 +265,9 @@ def get_dataset( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -310,7 +316,9 @@ def _delete_dataset_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py new file mode 100644 index 000000000000..3d622cd8a2aa --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimesOperations(object): + """IntegrationRuntimesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeListResponse" + """List Integration Runtimes. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeListResponse, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeListResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/integrationRuntimes'} # type: ignore + + def get( + self, + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeResource" + """Get Integration Runtime. + + :param integration_runtime_name: The Integration Runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/integrationRuntimes/{integrationRuntimeName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py index 763f281bccaa..9ec940e40b5a 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,7 +59,9 @@ def get_linked_services_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -125,7 +127,9 @@ def _create_or_update_linked_service_initial( ): # type: (...) 
-> Optional["models.LinkedServiceResource"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _linked_service = models.LinkedServiceResource(properties=properties) @@ -262,7 +266,9 @@ def get_linked_service( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -311,7 +317,9 @@ def _delete_linked_service_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py index 1594c92e9249..4aa1b37ac30e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,7 +59,9 @@ def get_notebooks_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -129,7 +131,9 @@ def get_notebook_summary_by_work_space( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -195,7 +199,9 @@ def _create_or_update_notebook_initial( ): # type: (...) 
-> Optional["models.NotebookResource"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _notebook = models.NotebookResource(properties=properties) @@ -331,7 +337,9 @@ def get_notebook( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -380,7 +388,9 @@ def _delete_notebook_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py index 0a0d5114de9a..f731d88c245e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,7 +59,9 @@ def get_pipelines_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -125,7 +127,9 @@ def _create_or_update_pipeline_initial( ): # type: (...) 
-> Optional["models.PipelineResource"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -259,7 +263,9 @@ def get_pipeline( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -308,7 +314,9 @@ def _delete_pipeline_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -430,7 +438,9 @@ def create_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py index b70337659f64..b4465926cd9e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,7 +59,9 @@ def query_pipeline_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -117,7 +119,9 @@ def get_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -177,7 +181,9 @@ def query_activity_runs( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") @@ -241,7 +247,9 @@ def cancel_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py index 25d404cab281..2140afe302d4 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,7 +59,9 @@ def get_spark_job_definitions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -139,7 +141,9 @@ def create_or_update_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition = models.SparkJobDefinitionResource(properties=properties) @@ -207,7 +211,9 @@ def get_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SparkJobDefinitionResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -265,7 +271,9 @@ def delete_spark_job_definition( :raises: 
~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -307,7 +315,9 @@ def _execute_spark_job_definition_initial( ): # type: (...) -> "models.SparkBatchJob" cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -414,7 +424,9 @@ def _debug_spark_job_definition_initial( ): # type: (...) -> "models.SparkBatchJob" cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition_azure_resource = models.SparkJobDefinitionResource(properties=properties) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py new file mode 100644 index 000000000000..0f5b2d5293ae --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SqlPoolsOperations(object): + """SqlPoolsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.SqlPoolInfoListResult" + """List Sql Pools. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPoolInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPoolInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPoolInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPoolInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/sqlPools'} # type: ignore + + def get( + self, + sql_pool_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SqlPool" + """Get Sql Pool. + + :param sql_pool_name: The Sql Pool name. 
+ :type sql_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPool, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPool + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPool"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPool', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/sqlPools/{sqlPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py index c13ee57ea579..9a21204f45da 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -57,7 +57,9 @@ def get_sql_scripts_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -137,7 +139,9 @@ def create_or_update_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _sql_script = models.SqlScriptResource(properties=properties) @@ -204,7 +208,9 @@ def get_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SqlScriptResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -262,7 +268,9 @@ def delete_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py index 4facb9084857..5d6ef2bf8c6f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,7 +59,9 @@ def get_triggers_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -125,7 +127,9 @@ def _create_or_update_trigger_initial( ): # type: (...) -> Optional["models.TriggerResource"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _trigger = models.TriggerResource(properties=properties) @@ -261,7 +265,9 @@ def get_trigger( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -310,7 +316,9 @@ def _delete_trigger_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -407,7 +415,9 @@ def _subscribe_trigger_to_events_initial( ): # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -521,7 +531,9 @@ def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -566,7 +578,9 @@ def _unsubscribe_trigger_from_events_initial( ): # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -671,7 +685,9 @@ def _start_trigger_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -768,7 +784,9 @@ def _stop_trigger_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py index 1b9b836166ad..ba4a1dda25fd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -62,7 +62,9 @@ def rerun_trigger_instance( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -117,7 +119,9 @@ def cancel_trigger_instance( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" accept = "application/json" @@ -169,7 +173,9 @@ def query_trigger_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py new file mode 100644 index 000000000000..6c361602a712 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceOperations(object): + """WorkspaceOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.Workspace" + """Get Workspace. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/workspace'} # type: ignore diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py index 5d4e57c46569..a3dfa46c408d 100644 --- 
a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py @@ -55,6 +55,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.spark_batch = SparkBatchOperations( diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py index 1bbd03a040ce..94b34fd756b0 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py @@ -52,6 +52,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.spark_batch = SparkBatchOperations( diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py index f9f9c08c0792..6274c73329d1 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -62,7 +62,9 @@ async def get_spark_batch_jobs( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJobCollection"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -123,7 +125,9 @@ async def create_spark_batch_job( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -185,7 +189,9 @@ async def get_spark_batch_job( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -239,7 +245,9 @@ async def cancel_spark_batch_job( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: 
ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) # Construct URL diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py index e90a6e27876c..05326e8d0005 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -62,7 +62,9 @@ async def get_spark_sessions( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSessionCollection"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -123,7 +125,9 @@ async def create_spark_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSession"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -185,7 +189,9 @@ async def get_spark_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSession"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -239,7 +245,9 @@ async def cancel_spark_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) # Construct URL @@ -286,7 +294,9 @@ async def reset_spark_session_timeout( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) # Construct URL @@ -333,7 +343,9 @@ async def get_spark_statements( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCollection"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -388,7 +400,9 @@ async def create_spark_statement( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatement"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -448,7 +462,9 @@ async def get_spark_statement( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatement"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -504,7 +520,9 @@ async def cancel_spark_statement( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCancellationResult"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py index 786a73ea80bb..fa0fd902866a 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -67,7 +67,9 @@ def get_spark_batch_jobs( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJobCollection"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -129,7 +131,9 @@ def create_spark_batch_job( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -192,7 +196,9 @@ def get_spark_batch_job( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -247,7 +253,9 @@ def cancel_spark_batch_job( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) # Construct URL diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py index 3f63be6d8756..a28ae36c3e79 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -67,7 +67,9 @@ def get_spark_sessions( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSessionCollection"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -129,7 +131,9 @@ def create_spark_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSession"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -192,7 +196,9 @@ def get_spark_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkSession"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -247,7 +253,9 @@ def cancel_spark_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) # Construct URL @@ -295,7 +303,9 @@ def reset_spark_session_timeout( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) # Construct URL @@ -343,7 +353,9 @@ def 
get_spark_statements( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCollection"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -399,7 +411,9 @@ def create_spark_statement( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatement"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -460,7 +474,9 @@ def get_spark_statement( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatement"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" @@ -517,7 +533,9 @@ def cancel_spark_statement( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkStatementCancellationResult"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) accept = "application/json"
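
Taken together, the hunks above add a 401 -> ClientAuthenticationError entry to the error_map of every operation, disable client-side validation on the serializers, and introduce the new SqlPoolsOperations and WorkspaceOperations groups. The following is a minimal usage sketch, not part of the patch, showing how a caller would exercise these paths; it assumes the regenerated ArtifactsClient exposes the new groups as sql_pools and workspace attributes (names inferred from the generated classes), that azure-identity's DefaultAzureCredential is used for authentication, and that the endpoint string and attribute accesses (workspace.name, the .value list on the pool result) are illustrative placeholders rather than verified API surface.

    from azure.core.exceptions import ClientAuthenticationError, HttpResponseError
    from azure.identity import DefaultAzureCredential
    from azure.synapse.artifacts import ArtifactsClient

    # Hypothetical endpoint; substitute a real Synapse workspace development endpoint.
    client = ArtifactsClient(
        credential=DefaultAzureCredential(),
        endpoint="https://<workspace-name>.dev.azuresynapse.net",
    )

    try:
        workspace = client.workspace.get()   # new WorkspaceOperations.get()
        sql_pools = client.sql_pools.list()  # new SqlPoolsOperations.list()
        print(workspace.name, [pool.name for pool in sql_pools.value])
    except ClientAuthenticationError:
        # With this patch, a 401 response surfaces here, because the generated
        # error_map now contains 401: ClientAuthenticationError instead of
        # letting the response fall through to a generic HttpResponseError.
        raise
    except HttpResponseError as error:
        print("Request failed:", error.message)

Because each operation still runs error_map.update(kwargs.pop('error_map', {})), callers can override or extend the default mapping per call, for example client.sql_pools.get(sql_pool_name, error_map={409: MyConflictError}) with a caller-defined exception type; that override pattern is unchanged by this regeneration.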