diff --git a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md index 9fc398f95895..bb80dbe7182f 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-accesscontrol/CHANGELOG.md @@ -1,5 +1,9 @@ # Release History +## 0.3.0 (2020-09-15) + +* Internal bugfixes (re-generated with latest generator) + ## 0.2.0 (2020-07-01) * Initial Release diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py index 37019086a1cd..aa7e87061bc7 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/__init__.py @@ -13,7 +13,7 @@ __all__ = ['AccessControlClient'] try: - from ._patch import patch_sdk + from ._patch import patch_sdk # type: ignore patch_sdk() except ImportError: pass diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py index 1ffdfa416790..0a13403bfad0 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_access_control_client.py @@ -29,7 +29,6 @@ class AccessControlClient(AccessControlClientOperationsMixin): :type credential: ~azure.core.credentials.TokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. :type endpoint: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
""" def __init__( @@ -45,6 +44,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py index e69c182dd453..14b01add185f 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_configuration.py @@ -48,8 +48,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2020-02-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -62,6 +61,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_metadata.json b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_metadata.json deleted file mode 100644 index 2e94b68c65a2..000000000000 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_metadata.json +++ /dev/null @@ -1,129 +0,0 @@ -{ - "chosen_version": "2020-02-01-preview", - "total_api_version_list": ["2020-02-01-preview"], - "client": { - "name": "AccessControlClient", - "filename": "_access_control_client", - "description": "AccessControlClient." 
- }, - "global_parameters": { - "sync_method": { - "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "async_method": { - "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, endpoint" - }, - "config": { - "credential": true, - "credential_scopes": ["https://dev.azuresynapse.net/.default"] - }, - "operation_groups": { - }, - "operation_mixins": { - "get_role_definitions" : { - "sync": { - "signature": "def get_role_definitions(\n self,\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "def get_role_definitions(\n self,\n **kwargs\n) -\u003e AsyncItemPaged[\"models.RolesListResponse\"]:\n", - "coroutine": false - }, - "doc": " \"\"\"List roles.\n\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: RolesListResponse, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.RolesListResponse\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "" - }, - "get_role_definition_by_id" : { - "sync": { - "signature": "def get_role_definition_by_id(\n self,\n role_id, # type: str\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_role_definition_by_id(\n self,\n role_id: str,\n **kwargs\n) -\u003e \"models.SynapseRole\":\n", - "coroutine": true - }, - "doc": " \"\"\"Get role by role Id.\n\n:param role_id: Synapse Built-In Role Id.\n:type role_id: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: SynapseRole, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.SynapseRole\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_id" - }, - "create_role_assignment" : { - "sync": { - "signature": "def create_role_assignment(\n self,\n create_role_assignment_options, # type: \"models.RoleAssignmentOptions\"\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def create_role_assignment(\n self,\n create_role_assignment_options: \"models.RoleAssignmentOptions\",\n **kwargs\n) -\u003e \"models.RoleAssignmentDetails\":\n", - "coroutine": true - }, - "doc": " \"\"\"Create role assignment.\n\n:param create_role_assignment_options: Details of role id and object id.\n:type create_role_assignment_options: ~azure.synapse.accesscontrol.models.RoleAssignmentOptions\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: RoleAssignmentDetails, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.RoleAssignmentDetails\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "create_role_assignment_options" - }, - 
"get_role_assignments" : { - "sync": { - "signature": "def get_role_assignments(\n self,\n role_id=None, # type: Optional[str]\n principal_id=None, # type: Optional[str]\n continuation_token_parameter=None, # type: Optional[str]\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_role_assignments(\n self,\n role_id: Optional[str] = None,\n principal_id: Optional[str] = None,\n continuation_token_parameter: Optional[str] = None,\n **kwargs\n) -\u003e List[\"models.RoleAssignmentDetails\"]:\n", - "coroutine": true - }, - "doc": " \"\"\"List role assignments.\n\n:param role_id: Synapse Built-In Role Id.\n:type role_id: str\n:param principal_id: Object ID of the AAD principal or security-group.\n:type principal_id: str\n:param continuation_token_parameter: Continuation token.\n:type continuation_token_parameter: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: list of RoleAssignmentDetails, or the result of cls(response)\n:rtype: list[~azure.synapse.accesscontrol.models.RoleAssignmentDetails]\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_id, principal_id, continuation_token_parameter" - }, - "get_role_assignment_by_id" : { - "sync": { - "signature": "def get_role_assignment_by_id(\n self,\n role_assignment_id, # type: str\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_role_assignment_by_id(\n self,\n role_assignment_id: str,\n **kwargs\n) -\u003e \"models.RoleAssignmentDetails\":\n", - "coroutine": true - }, - "doc": " \"\"\"Get role assignment by role assignment Id.\n\n:param role_assignment_id: The ID of the role assignment.\n:type role_assignment_id: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: RoleAssignmentDetails, or the result of cls(response)\n:rtype: ~azure.synapse.accesscontrol.models.RoleAssignmentDetails\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_assignment_id" - }, - "delete_role_assignment_by_id" : { - "sync": { - "signature": "def delete_role_assignment_by_id(\n self,\n role_assignment_id, # type: str\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def delete_role_assignment_by_id(\n self,\n role_assignment_id: str,\n **kwargs\n) -\u003e None:\n", - "coroutine": true - }, - "doc": " \"\"\"Delete role assignment by role assignment Id.\n\n:param role_assignment_id: The ID of the role assignment.\n:type role_assignment_id: str\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: None, or the result of cls(response)\n:rtype: None\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "role_assignment_id" - }, - "get_caller_role_assignments" : { - "sync": { - "signature": "def get_caller_role_assignments(\n self,\n **kwargs # type: Any\n):\n" - }, - "async": { - "signature": "async def get_caller_role_assignments(\n self,\n **kwargs\n) -\u003e List[str]:\n", - "coroutine": true - }, - "doc": " \"\"\"List role assignments of the caller.\n\n:keyword callable cls: A custom type or function that will be passed the direct response\n:return: list of str, or the result of cls(response)\n:rtype: list[str]\n:raises: ~azure.core.exceptions.HttpResponseError\n\"\"\"", - "call": "" - } - }, - "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.exceptions\": [\"HttpResponseError\", \"ResourceExistsError\", \"ResourceNotFoundError\", \"map_error\"], \"azure.core.pipeline\": 
[\"PipelineResponse\"], \"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"], \"azure.core.paging\": [\"ItemPaged\"]}, \"stdlib\": {\"warnings\": [null]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Callable\", \"Dict\", \"Generic\", \"Iterable\", \"List\", \"Optional\", \"TypeVar\"]}}}", - "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.exceptions\": [\"HttpResponseError\", \"ResourceExistsError\", \"ResourceNotFoundError\", \"map_error\"], \"azure.core.pipeline\": [\"PipelineResponse\"], \"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"], \"azure.core.async_paging\": [\"AsyncItemPaged\", \"AsyncList\"]}, \"stdlib\": {\"warnings\": [null]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"AsyncIterable\", \"Callable\", \"Dict\", \"Generic\", \"List\", \"Optional\", \"TypeVar\"]}}}" -} \ No newline at end of file diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py index 035146e99a22..92721eef7dd5 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.2.0" +VERSION = "0.3.0" diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py index 3cec25e9e712..8eafa989fcbc 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._access_control_client_async import AccessControlClient +from ._access_control_client import AccessControlClient __all__ = ['AccessControlClient'] diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client_async.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py similarity index 88% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client_async.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py index b25236edd254..922fb6e59303 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client_async.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_access_control_client.py @@ -15,8 +15,8 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration_async import AccessControlClientConfiguration -from .operations_async import AccessControlClientOperationsMixin +from ._configuration import AccessControlClientConfiguration +from .operations import AccessControlClientOperationsMixin from .. 
import models @@ -27,7 +27,6 @@ class AccessControlClient(AccessControlClientOperationsMixin): :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. :type endpoint: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( @@ -42,6 +41,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration_async.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py similarity index 93% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration_async.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py index 4c5da8936401..dd26c75218a7 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration_async.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/_configuration.py @@ -45,8 +45,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2020-02-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -58,6 +57,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/__init__.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/__init__.py similarity index 85% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/__init__.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/__init__.py index a13147e030fb..b4127a08ecec 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/__init__.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/__init__.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._access_control_client_operations_async import AccessControlClientOperationsMixin +from ._access_control_client_operations import AccessControlClientOperationsMixin __all__ = [ 'AccessControlClientOperationsMixin', diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/_access_control_client_operations_async.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py similarity index 90% rename from sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/_access_control_client_operations_async.py rename to sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py index 11653dd0c5cd..733347bac53e 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations_async/_access_control_client_operations_async.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/aio/operations/_access_control_client_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -32,14 +32,17 @@ def get_role_definitions( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RolesListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -103,9 +106,12 @@ async def get_role_definition_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SynapseRole"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_definition_by_id.metadata['url'] # type: ignore @@ -121,7 +127,7 @@ async def get_role_definition_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -155,10 +161,13 @@ async def create_role_assignment( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: 
ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_role_assignment.metadata['url'] # type: ignore @@ -174,13 +183,12 @@ async def create_role_assignment( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_role_assignment_options, 'RoleAssignmentOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -218,9 +226,12 @@ async def get_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["models.RoleAssignmentDetails"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignments.metadata['url'] # type: ignore @@ -241,7 +252,7 @@ async def get_role_assignments( header_parameters = {} # type: Dict[str, Any] if continuation_token_parameter is not None: header_parameters['x-ms-continuation'] = self._serialize.header("continuation_token_parameter", continuation_token_parameter, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -277,9 +288,12 @@ async def get_role_assignment_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignment_by_id.metadata['url'] # type: ignore @@ -295,7 +309,7 @@ async def get_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -329,9 +343,12 @@ async def delete_role_assignment_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.delete_role_assignment_by_id.metadata['url'] # type: ignore @@ -347,6 +364,7 @@ async def delete_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -374,9 +392,12 @@ async def get_caller_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[str]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_caller_role_assignments.metadata['url'] # type: ignore @@ -391,7 +412,7 @@ async def get_caller_role_assignments( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py index e27d6d28f4ff..54b7048badf9 100644 --- a/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py +++ b/sdk/synapse/azure-synapse-accesscontrol/azure/synapse/accesscontrol/operations/_access_control_client_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -37,14 +37,17 @@ def get_role_definitions( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RolesListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -109,9 +112,12 @@ def get_role_definition_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SynapseRole"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_definition_by_id.metadata['url'] # type: ignore @@ -127,7 +133,7 @@ def get_role_definition_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -162,10 +168,13 @@ def create_role_assignment( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_role_assignment.metadata['url'] # type: ignore @@ -181,13 +190,12 @@ def create_role_assignment( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_role_assignment_options, 'RoleAssignmentOptions') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -226,9 +234,12 @@ def get_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List["models.RoleAssignmentDetails"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignments.metadata['url'] # type: ignore @@ -249,7 +260,7 @@ def get_role_assignments( header_parameters = {} # type: Dict[str, Any] if continuation_token_parameter is not None: header_parameters['x-ms-continuation'] = self._serialize.header("continuation_token_parameter", continuation_token_parameter, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -286,9 +297,12 @@ def get_role_assignment_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.RoleAssignmentDetails"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version 
= "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_role_assignment_by_id.metadata['url'] # type: ignore @@ -304,7 +318,7 @@ def get_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -339,9 +353,12 @@ def delete_role_assignment_by_id( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.delete_role_assignment_by_id.metadata['url'] # type: ignore @@ -357,6 +374,7 @@ def delete_role_assignment_by_id( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -385,9 +403,12 @@ def get_caller_role_assignments( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[List[str]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-02-01-preview" + accept = "application/json" # Construct URL url = self.get_caller_role_assignments.metadata['url'] # type: ignore @@ -402,7 +423,7 @@ def get_caller_role_assignments( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md index 9fc398f95895..722b4dfbf02c 100644 --- a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md @@ -1,5 +1,18 @@ # Release History +## 0.3.0 (2020-09-15) + +** Features ** + +- Add Workspace operations +- Add SqlPools operations +- Add BigDataPools operations +- Add IntegrationRuntimes operations + +** Breaking changes ** + +- Migrated most long running operation to polling mechanism (operation now starts with `begin`) + ## 0.2.0 (2020-07-01) * Initial Release diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py index 0d7f64d18ff0..03703237ffc5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/__init__.py @@ -13,7 +13,7 @@ __all__ = ['ArtifactsClient'] try: - from ._patch import patch_sdk + from ._patch import patch_sdk # type: ignore patch_sdk() except ImportError: pass diff --git 
a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py index b38aadb1a7d6..cf31eab25040 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_artifacts_client.py @@ -29,6 +29,10 @@ from .operations import SqlScriptOperations from .operations import SparkJobDefinitionOperations from .operations import NotebookOperations +from .operations import WorkspaceOperations +from .operations import SqlPoolsOperations +from .operations import BigDataPoolsOperations +from .operations import IntegrationRuntimesOperations from . import models @@ -57,6 +61,14 @@ class ArtifactsClient(object): :vartype spark_job_definition: azure.synapse.artifacts.operations.SparkJobDefinitionOperations :ivar notebook: NotebookOperations operations :vartype notebook: azure.synapse.artifacts.operations.NotebookOperations + :ivar workspace: WorkspaceOperations operations + :vartype workspace: azure.synapse.artifacts.operations.WorkspaceOperations + :ivar sql_pools: SqlPoolsOperations operations + :vartype sql_pools: azure.synapse.artifacts.operations.SqlPoolsOperations + :ivar big_data_pools: BigDataPoolsOperations operations + :vartype big_data_pools: azure.synapse.artifacts.operations.BigDataPoolsOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: azure.synapse.artifacts.operations.IntegrationRuntimesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. 
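(Illustrative, not part of the patch: a minimal sketch of calling the operation groups the hunk above adds to the sync ArtifactsClient, assuming azure-identity is available. The endpoint and pool name are placeholders, and the sync method names are assumed to mirror the async operations shown later in this diff.)

from azure.identity import DefaultAzureCredential
from azure.synapse.artifacts import ArtifactsClient

client = ArtifactsClient(
    credential=DefaultAzureCredential(),
    endpoint="https://myworkspace.dev.azuresynapse.net",  # placeholder endpoint
    # Per the _configuration.py hunks in this patch, a credential_scopes kwarg
    # passed here now replaces the default scope instead of extending it.
)
pools = client.big_data_pools.list()               # operation group new in 0.3.0
pool = client.big_data_pools.get("mybigdatapool")  # hypothetical pool name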
@@ -77,6 +89,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.linked_service = LinkedServiceOperations( @@ -101,6 +114,14 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.notebook = NotebookOperations( self._client, self._config, self._serialize, self._deserialize) + self.workspace = WorkspaceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_pools = SqlPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.big_data_pools = BigDataPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtimes = IntegrationRuntimesOperations( + self._client, self._config, self._serialize, self._deserialize) def close(self): # type: () -> None diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py index 8d63a7bb27dd..5d0aff821595 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_configuration.py @@ -48,8 +48,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2019-06-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -62,6 +61,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_metadata.json b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_metadata.json deleted file mode 100644 index 533beae51bd9..000000000000 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_metadata.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "chosen_version": "2019-06-01-preview", - "total_api_version_list": ["2019-06-01-preview"], - "client": { - "name": "ArtifactsClient", - "filename": "_artifacts_client", - "description": "ArtifactsClient." 
- }, - "global_parameters": { - "sync_method": { - "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "async_method": { - "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, endpoint" - }, - "config": { - "credential": true, - "credential_scopes": ["https://dev.azuresynapse.net/.default"] - }, - "operation_groups": { - "linked_service": "LinkedServiceOperations", - "dataset": "DatasetOperations", - "pipeline": "PipelineOperations", - "pipeline_run": "PipelineRunOperations", - "trigger": "TriggerOperations", - "trigger_run": "TriggerRunOperations", - "data_flow": "DataFlowOperations", - "data_flow_debug_session": "DataFlowDebugSessionOperations", - "sql_script": "SqlScriptOperations", - "spark_job_definition": "SparkJobDefinitionOperations", - "notebook": "NotebookOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" -} \ No newline at end of file diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py index 035146e99a22..92721eef7dd5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.2.0" +VERSION = "0.3.0" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py index 3d76e5630c58..16f882fa0a48 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._artifacts_client_async import ArtifactsClient +from ._artifacts_client import ArtifactsClient __all__ = ['ArtifactsClient'] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py similarity index 63% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py index e8f9d7f6c113..43c8110c15a9 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_artifacts_client.py @@ -15,18 +15,22 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration_async import ArtifactsClientConfiguration -from .operations_async import LinkedServiceOperations -from .operations_async import DatasetOperations -from .operations_async import PipelineOperations -from .operations_async import PipelineRunOperations -from .operations_async import TriggerOperations -from .operations_async import TriggerRunOperations -from .operations_async import DataFlowOperations -from .operations_async import DataFlowDebugSessionOperations -from .operations_async import SqlScriptOperations -from .operations_async import SparkJobDefinitionOperations -from .operations_async import NotebookOperations +from ._configuration import ArtifactsClientConfiguration +from .operations import LinkedServiceOperations +from .operations import DatasetOperations +from .operations import PipelineOperations +from .operations import PipelineRunOperations +from .operations import TriggerOperations +from .operations import TriggerRunOperations +from .operations import DataFlowOperations +from .operations import DataFlowDebugSessionOperations +from .operations import SqlScriptOperations +from .operations import SparkJobDefinitionOperations +from .operations import NotebookOperations +from .operations import WorkspaceOperations +from .operations import SqlPoolsOperations +from .operations import BigDataPoolsOperations +from .operations import IntegrationRuntimesOperations from .. import models @@ -34,27 +38,35 @@ class ArtifactsClient(object): """ArtifactsClient. 
:ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: azure.synapse.artifacts.aio.operations_async.LinkedServiceOperations + :vartype linked_service: azure.synapse.artifacts.aio.operations.LinkedServiceOperations :ivar dataset: DatasetOperations operations - :vartype dataset: azure.synapse.artifacts.aio.operations_async.DatasetOperations + :vartype dataset: azure.synapse.artifacts.aio.operations.DatasetOperations :ivar pipeline: PipelineOperations operations - :vartype pipeline: azure.synapse.artifacts.aio.operations_async.PipelineOperations + :vartype pipeline: azure.synapse.artifacts.aio.operations.PipelineOperations :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: azure.synapse.artifacts.aio.operations_async.PipelineRunOperations + :vartype pipeline_run: azure.synapse.artifacts.aio.operations.PipelineRunOperations :ivar trigger: TriggerOperations operations - :vartype trigger: azure.synapse.artifacts.aio.operations_async.TriggerOperations + :vartype trigger: azure.synapse.artifacts.aio.operations.TriggerOperations :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: azure.synapse.artifacts.aio.operations_async.TriggerRunOperations + :vartype trigger_run: azure.synapse.artifacts.aio.operations.TriggerRunOperations :ivar data_flow: DataFlowOperations operations - :vartype data_flow: azure.synapse.artifacts.aio.operations_async.DataFlowOperations + :vartype data_flow: azure.synapse.artifacts.aio.operations.DataFlowOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: azure.synapse.artifacts.aio.operations_async.DataFlowDebugSessionOperations + :vartype data_flow_debug_session: azure.synapse.artifacts.aio.operations.DataFlowDebugSessionOperations :ivar sql_script: SqlScriptOperations operations - :vartype sql_script: azure.synapse.artifacts.aio.operations_async.SqlScriptOperations + :vartype sql_script: azure.synapse.artifacts.aio.operations.SqlScriptOperations :ivar spark_job_definition: SparkJobDefinitionOperations operations - :vartype spark_job_definition: azure.synapse.artifacts.aio.operations_async.SparkJobDefinitionOperations + :vartype spark_job_definition: azure.synapse.artifacts.aio.operations.SparkJobDefinitionOperations :ivar notebook: NotebookOperations operations - :vartype notebook: azure.synapse.artifacts.aio.operations_async.NotebookOperations + :vartype notebook: azure.synapse.artifacts.aio.operations.NotebookOperations + :ivar workspace: WorkspaceOperations operations + :vartype workspace: azure.synapse.artifacts.aio.operations.WorkspaceOperations + :ivar sql_pools: SqlPoolsOperations operations + :vartype sql_pools: azure.synapse.artifacts.aio.operations.SqlPoolsOperations + :ivar big_data_pools: BigDataPoolsOperations operations + :vartype big_data_pools: azure.synapse.artifacts.aio.operations.BigDataPoolsOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: azure.synapse.artifacts.aio.operations.IntegrationRuntimesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. 
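(Illustrative, not part of the patch: the same sketch against the renamed aio client. The list/get coroutines appear verbatim in _big_data_pools_operations.py below, so each call is awaited; the endpoint and pool name are placeholders.)

import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.synapse.artifacts.aio import ArtifactsClient

async def main():
    credential = DefaultAzureCredential()
    client = ArtifactsClient(
        credential=credential,
        endpoint="https://myworkspace.dev.azuresynapse.net",  # placeholder
    )
    try:
        pools = await client.big_data_pools.list()
        pool = await client.big_data_pools.get("mybigdatapool")  # hypothetical name
    finally:
        # Both the client and the async credential own aiohttp sessions and
        # should be closed explicitly (the aio client exposes an async close()).
        await client.close()
        await credential.close()

asyncio.run(main())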
@@ -74,6 +86,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.linked_service = LinkedServiceOperations( @@ -98,6 +111,14 @@ def __init__( self._client, self._config, self._serialize, self._deserialize) self.notebook = NotebookOperations( self._client, self._config, self._serialize, self._deserialize) + self.workspace = WorkspaceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.sql_pools = SqlPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.big_data_pools = BigDataPoolsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtimes = IntegrationRuntimesOperations( + self._client, self._config, self._serialize, self._deserialize) async def close(self) -> None: await self._client.close() diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py similarity index 93% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py index 760c01d1cbcb..9c9e4c5fa938 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/_configuration.py @@ -45,8 +45,7 @@ def __init__( self.credential = credential self.endpoint = endpoint self.api_version = "2019-06-01-preview" - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -58,6 +57,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py new file mode 100644 index 000000000000..f5afded7d4f5 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/__init__.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._linked_service_operations import LinkedServiceOperations +from ._dataset_operations import DatasetOperations +from ._pipeline_operations import PipelineOperations +from ._pipeline_run_operations import PipelineRunOperations +from ._trigger_operations import TriggerOperations +from ._trigger_run_operations import TriggerRunOperations +from ._data_flow_operations import DataFlowOperations +from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations +from ._sql_script_operations import SqlScriptOperations +from ._spark_job_definition_operations import SparkJobDefinitionOperations +from ._notebook_operations import NotebookOperations +from ._workspace_operations import WorkspaceOperations +from ._sql_pools_operations import SqlPoolsOperations +from ._big_data_pools_operations import BigDataPoolsOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations + +__all__ = [ + 'LinkedServiceOperations', + 'DatasetOperations', + 'PipelineOperations', + 'PipelineRunOperations', + 'TriggerOperations', + 'TriggerRunOperations', + 'DataFlowOperations', + 'DataFlowDebugSessionOperations', + 'SqlScriptOperations', + 'SparkJobDefinitionOperations', + 'NotebookOperations', + 'WorkspaceOperations', + 'SqlPoolsOperations', + 'BigDataPoolsOperations', + 'IntegrationRuntimesOperations', +] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py new file mode 100644 index 000000000000..8762d45c2248 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_big_data_pools_operations.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BigDataPoolsOperations: + """BigDataPoolsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + **kwargs + ) -> "models.BigDataPoolResourceInfoListResult": + """List Big Data Pools. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/bigDataPools'} # type: ignore + + async def get( + self, + big_data_pool_name: str, + **kwargs + ) -> "models.BigDataPoolResourceInfo": + """Get Big Data Pool. + + :param big_data_pool_name: The Big Data Pool name. 
+ :type big_data_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfo, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfo + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfo"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'bigDataPoolName': self._serialize.url("big_data_pool_name", big_data_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/bigDataPools/{bigDataPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_debug_session_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py similarity index 93% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_debug_session_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py index 7da11e64c8be..cba447f9f2cb 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_debug_session_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_debug_session_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -48,10 +48,13 @@ async def _create_data_flow_debug_session_initial( **kwargs ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: cls = kwargs.pop('cls', None) # type: 
ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_data_flow_debug_session_initial.metadata['url'] # type: ignore @@ -67,13 +70,12 @@ async def _create_data_flow_debug_session_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -165,14 +167,17 @@ def query_data_flow_debug_sessions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -236,10 +241,13 @@ async def add_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -255,13 +263,12 @@ async def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -293,10 +300,13 @@ async def delete_data_flow_debug_session( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] 
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete_data_flow_debug_session.metadata['url'] # type: ignore @@ -312,12 +322,12 @@ async def delete_data_flow_debug_session( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -337,10 +347,13 @@ async def _execute_command_initial( **kwargs ) -> Optional["models.DataFlowDebugCommandResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -356,13 +369,12 @@ async def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py similarity index 62% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py index 3f7c007efd0c..52a86db75ccd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_data_flow_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_data_flow_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
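The recurring `error_map` change above means a 401 now surfaces as `ClientAuthenticationError` instead of a generic `HttpResponseError`. A short sketch of what callers can rely on after this diff (the `data_flow` attribute name is assumed):

```python
from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError

async def fetch_flow(client, name: str):
    try:
        # error_map in each operation now maps 401/404/409 to typed exceptions;
        # a per-call `error_map=` kwarg is merged over these defaults.
        return await client.data_flow.get_data_flow(name)
    except ClientAuthenticationError:
        raise  # 401: credential or scope problem, newly typed by this diff
    except ResourceNotFoundError:
        return None  # 404 keeps its existing typed mapping
```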
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... import models @@ -40,37 +42,26 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config - async def create_or_update_data_flow( + async def _create_or_update_data_flow_initial( self, data_flow_name: str, properties: "models.DataFlow", if_match: Optional[str] = None, **kwargs - ) -> "models.DataFlowResource": - """Creates or updates a data flow. - - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + ) -> Optional["models.DataFlowResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _data_flow = models.DataFlowResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_data_flow.metadata['url'] # type: ignore + url = self._create_or_update_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -86,28 +77,95 @@ async def create_or_update_data_flow( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = 
self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _create_or_update_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + async def begin_create_or_update_data_flow( + self, + data_flow_name: str, + properties: "models.DataFlow", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.DataFlowResource"]: + """Creates or updates a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param properties: Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for AsyncLROBasePolling, False for no polling, or a + polling object for a personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either DataFlowResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.DataFlowResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_data_flow_initial( + data_flow_name=data_flow_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore async def get_data_flow( self, @@ -128,9 +186,12 @@ async def get_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_data_flow.metadata['url'] # type: ignore @@ -148,7 +209,7 @@ async def get_data_flow( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -167,27 +228,21 @@ async def get_data_flow( return deserialized get_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore - async def delete_data_flow( + async def _delete_data_flow_initial( self, data_flow_name: str, **kwargs ) -> None: - """Deletes a data flow. - - :param data_flow_name: The data flow name. 
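Since create_or_update_data_flow is now a long-running operation, callers go through the poller. A sketch, assuming a `models.DataFlow`-typed properties object built elsewhere and the `data_flow` attribute name; note that `polling` defaults to False in this generated code (`kwargs.pop('polling', False)`), so `polling=True` must be passed to actually poll:

```python
from azure.core.polling import AsyncLROPoller
from azure.synapse.artifacts import models

async def upsert_flow(client, name: str, properties: "models.DataFlow"):
    poller: AsyncLROPoller = await client.data_flow.begin_create_or_update_data_flow(
        name,
        properties=properties,
        polling=True,        # False (the default here) would skip polling entirely
        polling_interval=5,  # used only when the service sends no Retry-After header
    )
    # result() waits for the operation to settle and returns the final DataFlowResource.
    return await poller.result()
```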
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_data_flow.metadata['url'] # type: ignore + url = self._delete_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -200,12 +255,13 @@ async def delete_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -213,7 +269,61 @@ async def delete_data_flow( if cls: return cls(pipeline_response, None, {}) - delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _delete_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + async def begin_delete_data_flow( + self, + data_flow_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for AsyncLROBasePolling, False for no polling, or a + polling object for a personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_data_flow_initial( + data_flow_name=data_flow_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore def get_data_flows_by_workspace( self, @@ -227,14 +337,17 @@ def get_data_flows_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_dataset_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py similarity index 63% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_dataset_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py index 1fb9dab66407..a0981e4688da 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_dataset_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_dataset_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
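The `continuation_token` keyword threaded through these begin_* methods allows suspending and resuming a long-running delete; when a token is supplied, the initial request is skipped and the poller is rebuilt via `from_continuation_token`. A sketch (attribute names assumed as before):

```python
async def start_delete(client, name: str) -> str:
    poller = await client.data_flow.begin_delete_data_flow(name, polling=True)
    # Serialize the poller state; from_continuation_token() rebuilds it later.
    return poller.continuation_token()

async def finish_delete(client, name: str, token: str) -> None:
    poller = await client.data_flow.begin_delete_data_flow(
        name, continuation_token=token
    )
    await poller.result()
```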
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... import models @@ -52,14 +54,17 @@ def get_datasets_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -108,37 +113,26 @@ async def get_next(next_link=None): ) get_datasets_by_workspace.metadata = {'url': '/datasets'} # type: ignore - async def create_or_update_dataset( + async def _create_or_update_dataset_initial( self, dataset_name: str, properties: "models.Dataset", if_match: Optional[str] = None, **kwargs - ) -> "models.DatasetResource": - """Creates or updates a dataset. - - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
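Note that get_datasets_by_workspace is a plain `def` returning an AsyncItemPaged, so it is iterated rather than awaited. A short sketch (the `dataset` attribute name is assumed):

```python
async def list_dataset_names(client) -> list:
    names = []
    # The paged call itself is synchronous; each page is fetched lazily
    # through the prepare_request/get_next machinery shown above.
    async for dataset in client.dataset.get_datasets_by_workspace():
        names.append(dataset.name)
    return names
```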
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + ) -> Optional["models.DatasetResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _dataset = models.DatasetResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_dataset.metadata['url'] # type: ignore + url = self._create_or_update_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -154,28 +148,95 @@ async def create_or_update_dataset( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('DatasetResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _create_or_update_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + async def begin_create_or_update_dataset( + self, + dataset_name: str, + properties: "models.Dataset", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.DatasetResource"]: + """Creates or updates a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :param properties: Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for AsyncLROBasePolling, False for no polling, or a + polling object for a personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DatasetResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.DatasetResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_dataset_initial( + dataset_name=dataset_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore async def get_dataset( self, @@ -196,9 +257,12 @@ async def get_dataset( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_dataset.metadata['url'] # type: ignore @@ -216,7 +280,7 @@ async def get_dataset( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -237,27 +301,21 @@ async def get_dataset( return deserialized get_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore - async def delete_dataset( + async def _delete_dataset_initial( self, dataset_name: str, **kwargs ) -> None: - """Deletes a dataset. - - :param dataset_name: The dataset name.
- :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_dataset.metadata['url'] # type: ignore + url = self._delete_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -270,12 +328,13 @@ async def delete_dataset( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -283,4 +342,58 @@ async def delete_dataset( if cls: return cls(pipeline_response, None, {}) - delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _delete_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + async def begin_delete_dataset( + self, + dataset_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for AsyncLROBasePolling, False for no polling, or a + polling object for a personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
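Every operation above also threads a `cls` response hook through to deserialization. One hedged use is capturing the raw HTTP status next to the model, matching the `cls(pipeline_response, deserialized, {})` call sites in this diff:

```python
def with_status(pipeline_response, deserialized, response_headers):
    # Signature matches the cls(...) invocation in the generated operations.
    return deserialized, pipeline_response.http_response.status_code

async def get_dataset_with_status(client, name: str):
    return await client.dataset.get_dataset(name, cls=with_status)
```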
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_dataset_initial( + dataset_name=dataset_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py new file mode 100644 index 000000000000..aed2b9319436 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_integration_runtimes_operations.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimesOperations: + """IntegrationRuntimesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + **kwargs + ) -> "models.IntegrationRuntimeListResponse": + """List Integration Runtimes. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeListResponse, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeListResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/integrationRuntimes'} # type: ignore + + async def get( + self, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeResource": + """Get Integration Runtime. + + :param integration_runtime_name: The Integration Runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/integrationRuntimes/{integrationRuntimeName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_linked_service_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py similarity index 63% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_linked_service_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py index e4dbb57823fd..7bd876f6351f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_linked_service_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_linked_service_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
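And the matching sketch for the new IntegrationRuntimesOperations group. The `integration_runtimes` attribute, the `.value` field, and the default runtime name are assumptions, not shown in this diff:

```python
async def show_integration_runtimes(client) -> None:
    runtimes = await client.integration_runtimes.list()
    for runtime in runtimes.value:
        print(runtime.name)
    # Synapse workspaces are typically provisioned with an auto-resolve runtime.
    ir = await client.integration_runtimes.get("AutoResolveIntegrationRuntime")
    print(ir.name)
```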
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... import models @@ -52,14 +54,17 @@ def get_linked_services_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -108,37 +113,26 @@ async def get_next(next_link=None): ) get_linked_services_by_workspace.metadata = {'url': '/linkedservices'} # type: ignore - async def create_or_update_linked_service( + async def _create_or_update_linked_service_initial( self, linked_service_name: str, properties: "models.LinkedService", if_match: Optional[str] = None, **kwargs - ) -> "models.LinkedServiceResource": - """Creates or updates a linked service. - - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService - :param if_match: ETag of the linkedService entity. Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. 
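The `if_match` parameter carried into the new begin_create_or_update_linked_service (defined just below) enables optimistic concurrency. A sketch, assuming LinkedServiceResource exposes the usual `etag` field and a `linked_service` attribute on the client:

```python
async def safe_update_linked_service(client, name: str, properties) -> None:
    existing = await client.linked_service.get_linked_service(name)
    poller = await client.linked_service.begin_create_or_update_linked_service(
        name,
        properties=properties,
        if_match=existing.etag,  # "*" would force an unconditional update
        polling=True,
    )
    await poller.result()
```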
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + ) -> Optional["models.LinkedServiceResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _linked_service = models.LinkedServiceResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_linked_service.metadata['url'] # type: ignore + url = self._create_or_update_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -154,28 +148,95 @@ async def create_or_update_linked_service( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _create_or_update_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + async def begin_create_or_update_linked_service( + self, + linked_service_name: str, + properties: "models.LinkedService", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.LinkedServiceResource"]: + """Creates or updates a linked service. + + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param properties: Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + :param if_match: ETag of the linkedService entity. 
Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for AsyncLROBasePolling, False for no polling, or a + polling object for a personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either LinkedServiceResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.LinkedServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_linked_service_initial( + linked_service_name=linked_service_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore async def get_linked_service( self, @@ -197,9 +258,12 @@ async def get_linked_service( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_linked_service.metadata['url'] # type: ignore @@ -217,7 +281,7 @@ async def get_linked_service( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -238,27 +302,21 @@ async def get_linked_service( return deserialized get_linked_service.metadata = {'url':
'/linkedservices/{linkedServiceName}'} # type: ignore - async def delete_linked_service( + async def _delete_linked_service_initial( self, linked_service_name: str, **kwargs ) -> None: - """Deletes a linked service. - - :param linked_service_name: The linked service name. - :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_linked_service.metadata['url'] # type: ignore + url = self._delete_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -271,12 +329,13 @@ async def delete_linked_service( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -284,4 +343,58 @@ async def delete_linked_service( if cls: return cls(pipeline_response, None, {}) - delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _delete_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + async def begin_delete_linked_service( + self, + linked_service_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a linked service. + + :param linked_service_name: The linked service name. + :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for AsyncLROBasePolling, False for no polling, or a + polling object for a personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_linked_service_initial( + linked_service_name=linked_service_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_notebook_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py similarity index 67% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_notebook_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py index 68dd787b9868..8635059dfee7 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_notebook_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_notebook_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... 
import models @@ -52,14 +54,17 @@ def get_notebooks_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -120,14 +125,17 @@ def get_notebook_summary_by_work_space( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -176,37 +184,26 @@ async def get_next(next_link=None): ) get_notebook_summary_by_work_space.metadata = {'url': '/notebooks/summary'} # type: ignore - async def create_or_update_notebook( + async def _create_or_update_notebook_initial( self, notebook_name: str, properties: "models.Notebook", if_match: Optional[str] = None, **kwargs - ) -> "models.NotebookResource": - """Creates or updates a Note Book. - - :param notebook_name: The notebook name. - :type notebook_name: str - :param properties: Properties of Notebook. - :type properties: ~azure.synapse.artifacts.models.Notebook - :param if_match: ETag of the Note book entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.NotebookResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + ) -> Optional["models.NotebookResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _notebook = models.NotebookResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_notebook.metadata['url'] # type: ignore + url = self._create_or_update_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -222,28 +219,95 @@ async def create_or_update_notebook( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_notebook, 'NotebookResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('NotebookResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _create_or_update_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + async def begin_create_or_update_notebook( + self, + notebook_name: str, + properties: "models.Notebook", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.NotebookResource"]: + """Creates or updates a notebook. + + :param notebook_name: The notebook name. + :type notebook_name: str + :param properties: Properties of Notebook. + :type properties: ~azure.synapse.artifacts.models.Notebook + :param if_match: ETag of the notebook entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. 
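The `if_match` parameter documented here is the optimistic-concurrency hook: pass the ETag from a previous read to make the update conditional, or `"*"` to overwrite unconditionally. A sketch, reusing the `client` from the earlier example; the `notebook` operation-group attribute and the `.etag` property on the fetched resource are assumptions:

# Conditional update: succeeds only if the server-side entity still has this ETag.
existing = await client.notebook.get_notebook("my-notebook")
poller = await client.notebook.begin_create_or_update_notebook(
    notebook_name="my-notebook",
    properties=existing.properties,  # the (modified) Notebook payload
    if_match=existing.etag,          # ETag captured by the read above
    polling=True,
)
updated = await poller.result()
# if_match="*" forces an unconditional update; omit it entirely for a create.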
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either NotebookResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.NotebookResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_notebook_initial( + notebook_name=notebook_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore async def get_notebook( self, @@ -264,9 +328,12 @@ async def get_notebook( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_notebook.metadata['url'] # type: ignore @@ -284,7 +351,7 @@ async def get_notebook( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -305,27 +372,21 @@ async def get_notebook( return deserialized get_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore - async def delete_notebook( + async def _delete_notebook_initial( self, notebook_name: str, **kwargs ) -> None: - """Deletes a Note book. 
- - :param notebook_name: The notebook name. - :type notebook_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_notebook.metadata['url'] # type: ignore + url = self._delete_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -338,12 +399,13 @@ async def delete_notebook( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -351,4 +413,58 @@ async def delete_notebook( if cls: return cls(pipeline_response, None, {}) - delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _delete_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + async def begin_delete_notebook( + self, + notebook_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a notebook. + + :param notebook_name: The notebook name. + :type notebook_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
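The `continuation_token` keyword documented here pairs with `AsyncLROPoller.continuation_token()` from azure-core: a caller can persist the token and rebuild the poller later, since the generated body skips the initial request and calls `AsyncLROPoller.from_continuation_token` when a token is supplied. A sketch, again reusing `client`; `store`/`load` stand in for any persistence mechanism and are hypothetical helpers:

# Start the delete and persist its state.
poller = await client.notebook.begin_delete_notebook("my-notebook", polling=True)
store(poller.continuation_token())

# Later, possibly in a fresh process: resume from the saved state.
resumed = await client.notebook.begin_delete_notebook(
    "my-notebook",
    continuation_token=load(),
    polling=True,
)
await resumed.result()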
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_notebook_initial( + notebook_name=notebook_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py similarity index 68% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py index 956a5c55db3b..892d72270055 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_operations.py @@ -5,13 +5,15 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling from ... 
import models @@ -52,14 +54,17 @@ def get_pipelines_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -108,35 +113,24 @@ async def get_next(next_link=None): ) get_pipelines_by_workspace.metadata = {'url': '/pipelines'} # type: ignore - async def create_or_update_pipeline( + async def _create_or_update_pipeline_initial( self, pipeline_name: str, pipeline: "models.PipelineResource", if_match: Optional[str] = None, **kwargs - ) -> "models.PipelineResource": - """Creates or updates a pipeline. - - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~azure.synapse.artifacts.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + ) -> Optional["models.PipelineResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_pipeline.metadata['url'] # type: ignore + url = self._create_or_update_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -152,28 +146,95 @@ async def create_or_update_pipeline( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = 
pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('PipelineResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _create_or_update_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + async def begin_create_or_update_pipeline( + self, + pipeline_name: str, + pipeline: "models.PipelineResource", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.PipelineResource"]: + """Creates or updates a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. + :type pipeline: ~azure.synapse.artifacts.models.PipelineResource + :param if_match: ETag of the pipeline entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
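The polling keywords documented in these `begin_*` docstrings map directly onto the dispatch below: `True` selects `AsyncLROBasePolling`, `False` selects `AsyncNoPolling` (the generated default), and any other value is used as the polling method itself; `polling_interval` feeds the delay used when no Retry-After header is returned. A sketch, with `pipeline_resource` assumed to be a prebuilt `models.PipelineResource` and the `pipeline` operation-group attribute an assumption:

from azure.core.polling.async_base_polling import AsyncLROBasePolling

# polling=False (the generated default here): result() yields the initial response.
poller = await client.pipeline.begin_create_or_update_pipeline(
    "my-pipeline", pipeline_resource, polling=False)

# polling=True polls with AsyncLROBasePolling; polling_interval sets the delay
# applied when the service sends no Retry-After header.
poller = await client.pipeline.begin_create_or_update_pipeline(
    "my-pipeline", pipeline_resource, polling=True, polling_interval=5)

# Any AsyncPollingMethod instance can be supplied directly.
poller = await client.pipeline.begin_create_or_update_pipeline(
    "my-pipeline", pipeline_resource, polling=AsyncLROBasePolling(timeout=2))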
+ :return: An instance of AsyncLROPoller that returns either PipelineResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.PipelineResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_pipeline_initial( + pipeline_name=pipeline_name, + pipeline=pipeline, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore async def get_pipeline( self, @@ -194,9 +255,12 @@ async def get_pipeline( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline.metadata['url'] # type: ignore @@ -214,7 +278,7 @@ async def get_pipeline( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -235,27 +299,21 @@ async def get_pipeline( return deserialized get_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore - async def delete_pipeline( + async def _delete_pipeline_initial( self, pipeline_name: str, **kwargs ) -> None: - """Deletes a pipeline. - - :param pipeline_name: The pipeline name. 
- :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_pipeline.metadata['url'] # type: ignore + url = self._delete_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -268,12 +326,13 @@ async def delete_pipeline( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -281,7 +340,61 @@ async def delete_pipeline( if cls: return cls(pipeline_response, None, {}) - delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _delete_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + async def begin_delete_pipeline( + self, + pipeline_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
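The `cls` keyword mentioned just above is the supported hook for reaching the raw response: it receives the pipeline response, the deserialized body, and the response headers, and whatever it returns becomes the operation's (or here, the poller's) result. A sketch under the same client assumptions:

# Capture the HTTP status code of the final response instead of the (None) body.
def capture_status(pipeline_response, deserialized, headers):
    # pipeline_response.http_response is the transport-level response object.
    return pipeline_response.http_response.status_code

poller = await client.pipeline.begin_delete_pipeline(
    "my-pipeline", polling=True, cls=capture_status)
status = await poller.result()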
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_pipeline_initial( + pipeline_name=pipeline_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore async def create_pipeline_run( self, @@ -314,10 +427,13 @@ async def create_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_pipeline_run.metadata['url'] # type: ignore @@ -340,7 +456,7 @@ async def create_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -349,11 +465,10 @@ async def create_pipeline_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_run_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py similarity index 91% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_run_operations_async.py rename to 
sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py index 966fc280b177..8651bf55c955 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_pipeline_run_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_pipeline_run_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -54,10 +54,13 @@ async def query_pipeline_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_pipeline_runs_by_workspace.metadata['url'] # type: ignore @@ -73,13 +76,12 @@ async def query_pipeline_runs_by_workspace( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -111,9 +113,12 @@ async def get_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline_run.metadata['url'] # type: ignore @@ -129,7 +134,7 @@ async def get_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -169,10 +174,13 @@ async def query_activity_runs( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
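Note that `create_pipeline_run` above now treats 202 as its success code and returns a `CreateRunResponse`, whose run ID feeds the pipeline-run operations patched in this file. A sketch; the `pipeline_run` operation-group attribute and the Data Factory-style field names (`run_id`, `last_updated_after`, `last_updated_before`, `status`) are assumptions:

import datetime

run = await client.pipeline.create_pipeline_run("my-pipeline")
now = datetime.datetime.now(datetime.timezone.utc)
filters = models.RunFilterParameters(
    last_updated_after=now - datetime.timedelta(hours=1),
    last_updated_before=now,
)
runs = await client.pipeline_run.query_pipeline_runs_by_workspace(filters)
details = await client.pipeline_run.get_pipeline_run(run.run_id)
if details.status == "InProgress":
    await client.pipeline_run.cancel_pipeline_run(run.run_id)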
error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_activity_runs.metadata['url'] # type: ignore @@ -190,13 +198,12 @@ async def query_activity_runs( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -232,9 +239,12 @@ async def cancel_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.cancel_pipeline_run.metadata['url'] # type: ignore @@ -252,6 +262,7 @@ async def cancel_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_spark_job_definition_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py similarity index 93% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_spark_job_definition_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py index d0e34edc20ed..0f3bf8bffe85 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_spark_job_definition_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_spark_job_definition_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,14 +54,17 @@ def get_spark_job_definitions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -132,12 +135,15 @@ async def create_or_update_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition = models.SparkJobDefinitionResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_spark_job_definition.metadata['url'] # type: ignore @@ -156,13 +162,12 @@ async def create_or_update_spark_job_definition( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -199,9 +204,12 @@ async def get_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SparkJobDefinitionResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_spark_job_definition.metadata['url'] # type: ignore @@ -219,7 +227,7 @@ async def get_spark_job_definition( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -255,9 +263,12 @@ async def delete_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = 
self.delete_spark_job_definition.metadata['url'] # type: ignore @@ -273,6 +284,7 @@ async def delete_spark_job_definition( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -294,9 +306,12 @@ async def _execute_spark_job_definition_initial( **kwargs ) -> "models.SparkBatchJob": cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._execute_spark_job_definition_initial.metadata['url'] # type: ignore @@ -312,7 +327,7 @@ async def _execute_spark_job_definition_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -398,12 +413,15 @@ async def _debug_spark_job_definition_initial( **kwargs ) -> "models.SparkBatchJob": cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition_azure_resource = models.SparkJobDefinitionResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._debug_spark_job_definition_initial.metadata['url'] # type: ignore @@ -419,13 +437,12 @@ async def _debug_spark_job_definition_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition_azure_resource, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py new file mode 100644 index 000000000000..1c4f0feb7294 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_pools_operations.py @@ -0,0 +1,146 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SqlPoolsOperations: + """SqlPoolsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + **kwargs + ) -> "models.SqlPoolInfoListResult": + """List Sql Pools. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPoolInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPoolInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPoolInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPoolInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/sqlPools'} # type: ignore + + async def get( + self, + sql_pool_name: str, + **kwargs + ) -> 
"models.SqlPool": + """Get Sql Pool. + + :param sql_pool_name: The Sql Pool name. + :type sql_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPool, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPool + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPool"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPool', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/sqlPools/{sqlPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_sql_script_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py similarity index 91% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_sql_script_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py index 8cd3366930fa..bba7e75e340e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_sql_script_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_sql_script_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -52,14 +52,17 @@ def get_sql_scripts_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError 
+ } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -130,12 +133,15 @@ async def create_or_update_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _sql_script = models.SqlScriptResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_sql_script.metadata['url'] # type: ignore @@ -154,13 +160,12 @@ async def create_or_update_sql_script( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_sql_script, 'SqlScriptResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -196,9 +201,12 @@ async def get_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SqlScriptResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_sql_script.metadata['url'] # type: ignore @@ -216,7 +224,7 @@ async def get_sql_script( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -252,9 +260,12 @@ async def delete_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.delete_sql_script.metadata['url'] # type: ignore @@ -270,6 +281,7 @@ async def delete_sql_script( # Construct headers header_parameters = {} # type: Dict[str, Any] + 
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py similarity index 80% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py index 95d10ac44346..866aedc0a1aa 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -54,14 +54,17 @@ def get_triggers_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -110,37 +113,26 @@ async def get_next(next_link=None): ) get_triggers_by_workspace.metadata = {'url': '/triggers'} # type: ignore - async def create_or_update_trigger( + async def _create_or_update_trigger_initial( self, trigger_name: str, properties: "models.Trigger", if_match: Optional[str] = None, **kwargs - ) -> "models.TriggerResource": - """Creates or updates a trigger. - - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~azure.synapse.artifacts.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + ) -> Optional["models.TriggerResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _trigger = models.TriggerResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_trigger.metadata['url'] # type: ignore + url = self._create_or_update_trigger_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -156,28 +148,95 @@ async def create_or_update_trigger( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + _create_or_update_trigger_initial.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + + async def begin_create_or_update_trigger( + self, + trigger_name: str, + properties: "models.Trigger", + if_match: Optional[str] = None, + **kwargs + ) -> AsyncLROPoller["models.TriggerResource"]: + """Creates or updates a trigger. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param properties: Properties of the trigger. + :type properties: ~azure.synapse.artifacts.models.Trigger + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
+        :type if_match: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for AsyncLROBasePolling, False for no polling, or a
+         polling object for a custom polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either TriggerResource or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.TriggerResource]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', False)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.TriggerResource"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._create_or_update_trigger_initial(
+                trigger_name=trigger_name,
+                properties=properties,
+                if_match=if_match,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            deserialized = self._deserialize('TriggerResource', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'}  # type: ignore

     async def get_trigger(
         self,
@@ -198,9 +257,12 @@ async def get_trigger(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType[Optional["models.TriggerResource"]]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         api_version = "2019-06-01-preview"
+        accept = "application/json"

         # Construct URL
         url = self.get_trigger.metadata['url']  # type: ignore
@@ -218,7 +280,7 @@
         header_parameters = {}  # type: Dict[str, Any]
         if if_none_match is not None:
             header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
@@ -239,27 +301,21 @@
         return deserialized
     get_trigger.metadata = {'url': '/triggers/{triggerName}'}  # type: ignore

-    async def delete_trigger(
+    async def _delete_trigger_initial(
         self,
         trigger_name: str,
         **kwargs
     ) -> None:
-        """Deletes a trigger.
-
-        :param trigger_name: The trigger name.
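Because create_or_update_trigger is now split into a private _create_or_update_trigger_initial call plus a public begin_create_or_update_trigger, callers receive an AsyncLROPoller rather than the resource itself. A minimal usage sketch; the trigger attribute name and the trigger_props value are assumptions, not shown in this diff:

# Sketch only: the operation-group attribute name is an assumption.
async def upsert_trigger(client, trigger_props):
    poller = await client.trigger.begin_create_or_update_trigger(
        trigger_name="mytrigger",
        properties=trigger_props,
        polling=True,  # polling defaults to False in this generated code
    )
    return await poller.result()  # models.TriggerResource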
-        :type trigger_name: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
-        :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
         cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         api_version = "2019-06-01-preview"
+        accept = "application/json"

         # Construct URL
-        url = self.delete_trigger.metadata['url']  # type: ignore
+        url = self._delete_trigger_initial.metadata['url']  # type: ignore
         path_format_arguments = {
             'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
             'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
@@ -272,12 +328,13 @@

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.delete(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

-        if response.status_code not in [200, 204]:
+        if response.status_code not in [200, 202, 204]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
             error = self._deserialize(models.CloudError, response)
             raise HttpResponseError(response=response, model=error)
@@ -285,7 +342,61 @@
         if cls:
             return cls(pipeline_response, None, {})

-    delete_trigger.metadata = {'url': '/triggers/{triggerName}'}  # type: ignore
+    _delete_trigger_initial.metadata = {'url': '/triggers/{triggerName}'}  # type: ignore
+
+    async def begin_delete_trigger(
+        self,
+        trigger_name: str,
+        **kwargs
+    ) -> AsyncLROPoller[None]:
+        """Deletes a trigger.
+
+        :param trigger_name: The trigger name.
+        :type trigger_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for AsyncLROBasePolling, False for no polling, or a
+         polling object for a custom polling strategy
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_trigger_initial( + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore async def _subscribe_trigger_to_events_initial( self, @@ -293,9 +404,12 @@ async def _subscribe_trigger_to_events_initial( **kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._subscribe_trigger_to_events_initial.metadata['url'] # type: ignore @@ -311,7 +425,7 @@ async def _subscribe_trigger_to_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -404,9 +518,12 @@ async def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -422,7 +539,7 @@ async def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -447,9 +564,12 @@ async def _unsubscribe_trigger_from_events_initial( 
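begin_delete_trigger follows the same long-running-operation pattern: the initial DELETE now also accepts a 202 status, and completion is observed through the returned poller. A sketch under the same assumptions as above:

# Sketch only: the operation-group attribute name is an assumption.
async def drop_trigger(client, name):
    poller = await client.trigger.begin_delete_trigger(name, polling=True)
    await poller.result()  # resolves to None once the delete completes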
**kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._unsubscribe_trigger_from_events_initial.metadata['url'] # type: ignore @@ -465,7 +585,7 @@ async def _unsubscribe_trigger_from_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -549,9 +669,12 @@ async def _start_trigger_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._start_trigger_initial.metadata['url'] # type: ignore @@ -567,6 +690,7 @@ async def _start_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -642,9 +766,12 @@ async def _stop_trigger_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._stop_trigger_initial.metadata['url'] # type: ignore @@ -660,6 +787,7 @@ async def _stop_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_run_operations_async.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py similarity index 67% rename from sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_run_operations_async.py rename to sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py index 8f8a60659f01..64299e81536b 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/_trigger_run_operations_async.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_trigger_run_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from 
azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest @@ -57,9 +57,12 @@ async def rerun_trigger_instance( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.rerun_trigger_instance.metadata['url'] # type: ignore @@ -76,6 +79,7 @@ async def rerun_trigger_instance( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -91,6 +95,62 @@ async def rerun_trigger_instance( rerun_trigger_instance.metadata = {'url': '/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + async def cancel_trigger_instance( + self, + trigger_name: str, + run_id: str, + **kwargs + ) -> None: + """Cancel single trigger instance by runId. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.cancel_trigger_instance.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.CloudError, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + cancel_trigger_instance.metadata = {'url': 
'/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + async def query_trigger_runs_by_workspace( self, filter_parameters: "models.RunFilterParameters", @@ -106,10 +166,13 @@ async def query_trigger_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_trigger_runs_by_workspace.metadata['url'] # type: ignore @@ -125,13 +188,12 @@ async def query_trigger_runs_by_workspace( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py new file mode 100644 index 000000000000..7704c4f36dc6 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations/_workspace_operations.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceOperations: + """WorkspaceOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + **kwargs + ) -> "models.Workspace": + """Get Workspace. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/workspace'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/__init__.py deleted file mode 100644 index 3ff8957df2ea..000000000000 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/aio/operations_async/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from ._linked_service_operations_async import LinkedServiceOperations -from ._dataset_operations_async import DatasetOperations -from ._pipeline_operations_async import PipelineOperations -from ._pipeline_run_operations_async import PipelineRunOperations -from ._trigger_operations_async import TriggerOperations -from ._trigger_run_operations_async import TriggerRunOperations -from ._data_flow_operations_async import DataFlowOperations -from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations -from ._sql_script_operations_async import SqlScriptOperations -from ._spark_job_definition_operations_async import SparkJobDefinitionOperations -from ._notebook_operations_async import NotebookOperations - -__all__ = [ - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', - 'DataFlowDebugSessionOperations', - 'SqlScriptOperations', - 'SparkJobDefinitionOperations', - 'NotebookOperations', -] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py index 43d23d006cb7..1174fc376d89 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py @@ -15,21 +15,49 @@ from ._models_py3 import AddDataFlowToDebugSessionResponse from ._models_py3 import AmazonMWSLinkedService from ._models_py3 import AmazonMWSObjectDataset + from ._models_py3 import AmazonMWSSource from ._models_py3 import AmazonRedshiftLinkedService + from ._models_py3 import AmazonRedshiftSource from ._models_py3 import AmazonRedshiftTableDataset from ._models_py3 import AmazonS3LinkedService + from ._models_py3 import AmazonS3Location + from ._models_py3 import AmazonS3ReadSettings from ._models_py3 import AppendVariableActivity + from ._models_py3 import AutoPauseProperties + from ._models_py3 import AutoScaleProperties from ._models_py3 import AvroDataset + from ._models_py3 import AvroFormat + from ._models_py3 import AvroSink + from ._models_py3 import AvroSource + from ._models_py3 import AvroWriteSettings from ._models_py3 import AzureBatchLinkedService from ._models_py3 import AzureBlobFSLinkedService + from ._models_py3 import AzureBlobFSLocation + from ._models_py3 import AzureBlobFSReadSettings + from ._models_py3 import AzureBlobFSSink + from ._models_py3 import AzureBlobFSSource + from ._models_py3 import AzureBlobFSWriteSettings from ._models_py3 import AzureBlobStorageLinkedService + from ._models_py3 import AzureBlobStorageLocation + from ._models_py3 import AzureBlobStorageReadSettings + from ._models_py3 import AzureBlobStorageWriteSettings from ._models_py3 import AzureDataExplorerCommandActivity from ._models_py3 import AzureDataExplorerLinkedService + from ._models_py3 import AzureDataExplorerSink + from ._models_py3 import AzureDataExplorerSource from ._models_py3 import AzureDataExplorerTableDataset from ._models_py3 import AzureDataLakeAnalyticsLinkedService from ._models_py3 import AzureDataLakeStoreLinkedService + from ._models_py3 import AzureDataLakeStoreLocation + from ._models_py3 import AzureDataLakeStoreReadSettings + from ._models_py3 import AzureDataLakeStoreSink + from ._models_py3 import AzureDataLakeStoreSource + from ._models_py3 import 
AzureDataLakeStoreWriteSettings from ._models_py3 import AzureDatabricksLinkedService + from ._models_py3 import AzureEntityResource from ._models_py3 import AzureFileStorageLinkedService + from ._models_py3 import AzureFileStorageLocation + from ._models_py3 import AzureFileStorageReadSettings from ._models_py3 import AzureFunctionActivity from ._models_py3 import AzureFunctionLinkedService from ._models_py3 import AzureKeyVaultLinkedService @@ -41,40 +69,70 @@ from ._models_py3 import AzureMLUpdateResourceActivity from ._models_py3 import AzureMLWebServiceFile from ._models_py3 import AzureMariaDBLinkedService + from ._models_py3 import AzureMariaDBSource from ._models_py3 import AzureMariaDBTableDataset from ._models_py3 import AzureMySqlLinkedService + from ._models_py3 import AzureMySqlSink + from ._models_py3 import AzureMySqlSource from ._models_py3 import AzureMySqlTableDataset from ._models_py3 import AzurePostgreSqlLinkedService + from ._models_py3 import AzurePostgreSqlSink + from ._models_py3 import AzurePostgreSqlSource from ._models_py3 import AzurePostgreSqlTableDataset + from ._models_py3 import AzureQueueSink from ._models_py3 import AzureSearchIndexDataset + from ._models_py3 import AzureSearchIndexSink from ._models_py3 import AzureSearchLinkedService from ._models_py3 import AzureSqlDWLinkedService from ._models_py3 import AzureSqlDWTableDataset from ._models_py3 import AzureSqlDatabaseLinkedService from ._models_py3 import AzureSqlMILinkedService from ._models_py3 import AzureSqlMITableDataset + from ._models_py3 import AzureSqlSink + from ._models_py3 import AzureSqlSource from ._models_py3 import AzureSqlTableDataset from ._models_py3 import AzureStorageLinkedService from ._models_py3 import AzureTableDataset + from ._models_py3 import AzureTableSink + from ._models_py3 import AzureTableSource from ._models_py3 import AzureTableStorageLinkedService from ._models_py3 import BigDataPoolReference + from ._models_py3 import BigDataPoolResourceInfo + from ._models_py3 import BigDataPoolResourceInfoListResult from ._models_py3 import BinaryDataset + from ._models_py3 import BinarySink + from ._models_py3 import BinarySource + from ._models_py3 import BlobEventsTrigger + from ._models_py3 import BlobSink + from ._models_py3 import BlobSource + from ._models_py3 import BlobTrigger from ._models_py3 import CassandraLinkedService + from ._models_py3 import CassandraSource from ._models_py3 import CassandraTableDataset + from ._models_py3 import ChainingTrigger from ._models_py3 import CloudError from ._models_py3 import CommonDataServiceForAppsEntityDataset from ._models_py3 import CommonDataServiceForAppsLinkedService + from ._models_py3 import CommonDataServiceForAppsSink + from ._models_py3 import CommonDataServiceForAppsSource from ._models_py3 import ConcurLinkedService from ._models_py3 import ConcurObjectDataset + from ._models_py3 import ConcurSource from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity from ._models_py3 import CopySink from ._models_py3 import CopySource + from ._models_py3 import CopyTranslator from ._models_py3 import CosmosDbLinkedService from ._models_py3 import CosmosDbMongoDbApiCollectionDataset from ._models_py3 import CosmosDbMongoDbApiLinkedService + from ._models_py3 import CosmosDbMongoDbApiSink + from ._models_py3 import CosmosDbMongoDbApiSource from ._models_py3 import CosmosDbSqlApiCollectionDataset + from ._models_py3 import CosmosDbSqlApiSink + from ._models_py3 import CosmosDbSqlApiSource from ._models_py3 
import CouchbaseLinkedService + from ._models_py3 import CouchbaseSource from ._models_py3 import CouchbaseTableDataset from ._models_py3 import CreateDataFlowDebugSessionRequest from ._models_py3 import CreateDataFlowDebugSessionResponse @@ -82,6 +140,10 @@ from ._models_py3 import CustomActivity from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataSourceLinkedService + from ._models_py3 import CustomDataset + from ._models_py3 import CustomSetupBase + from ._models_py3 import DWCopyCommandDefaultValue + from ._models_py3 import DWCopyCommandSettings from ._models_py3 import DataFlow from ._models_py3 import DataFlowDebugCommandRequest from ._models_py3 import DataFlowDebugCommandResponse @@ -102,12 +164,14 @@ from ._models_py3 import DataFlowSourceSetting from ._models_py3 import DataFlowStagingInfo from ._models_py3 import DataLakeAnalyticsUSQLActivity + from ._models_py3 import DataLakeStorageAccountDetails from ._models_py3 import DatabricksNotebookActivity from ._models_py3 import DatabricksSparkJarActivity from ._models_py3 import DatabricksSparkPythonActivity from ._models_py3 import Dataset from ._models_py3 import DatasetBZip2Compression from ._models_py3 import DatasetCompression + from ._models_py3 import DatasetDataElement from ._models_py3 import DatasetDebugResource from ._models_py3 import DatasetDeflateCompression from ._models_py3 import DatasetFolder @@ -116,23 +180,45 @@ from ._models_py3 import DatasetLocation from ._models_py3 import DatasetReference from ._models_py3 import DatasetResource + from ._models_py3 import DatasetSchemaDataElement + from ._models_py3 import DatasetStorageFormat from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService + from ._models_py3 import Db2Source from ._models_py3 import Db2TableDataset from ._models_py3 import DeleteActivity from ._models_py3 import DeleteDataFlowDebugSessionRequest from ._models_py3 import DelimitedTextDataset + from ._models_py3 import DelimitedTextReadSettings + from ._models_py3 import DelimitedTextSink + from ._models_py3 import DelimitedTextSource + from ._models_py3 import DelimitedTextWriteSettings + from ._models_py3 import DependencyReference + from ._models_py3 import DistcpSettings from ._models_py3 import DocumentDbCollectionDataset + from ._models_py3 import DocumentDbCollectionSink + from ._models_py3 import DocumentDbCollectionSource from ._models_py3 import DrillLinkedService + from ._models_py3 import DrillSource from ._models_py3 import DrillTableDataset from ._models_py3 import DynamicsAXLinkedService from ._models_py3 import DynamicsAXResourceDataset + from ._models_py3 import DynamicsAXSource from ._models_py3 import DynamicsCrmEntityDataset from ._models_py3 import DynamicsCrmLinkedService + from ._models_py3 import DynamicsCrmSink + from ._models_py3 import DynamicsCrmSource from ._models_py3 import DynamicsEntityDataset from ._models_py3 import DynamicsLinkedService + from ._models_py3 import DynamicsSink + from ._models_py3 import DynamicsSource from ._models_py3 import EloquaLinkedService from ._models_py3 import EloquaObjectDataset + from ._models_py3 import EloquaSource + from ._models_py3 import EntityReference + from ._models_py3 import ErrorAdditionalInfo + from ._models_py3 import ErrorContract + from ._models_py3 import ErrorResponse from ._models_py3 import EvaluateDataFlowExpressionRequest from ._models_py3 import ExecuteDataFlowActivity from ._models_py3 import ExecuteDataFlowActivityTypePropertiesCompute 
@@ -143,20 +229,35 @@ from ._models_py3 import ExposureControlResponse from ._models_py3 import Expression from ._models_py3 import FileServerLinkedService + from ._models_py3 import FileServerLocation + from ._models_py3 import FileServerReadSettings + from ._models_py3 import FileServerWriteSettings + from ._models_py3 import FileSystemSink + from ._models_py3 import FileSystemSource from ._models_py3 import FilterActivity from ._models_py3 import ForEachActivity + from ._models_py3 import FormatReadSettings + from ._models_py3 import FormatWriteSettings + from ._models_py3 import FtpReadSettings from ._models_py3 import FtpServerLinkedService + from ._models_py3 import FtpServerLocation from ._models_py3 import GetMetadataActivity from ._models_py3 import GetSsisObjectMetadataRequest from ._models_py3 import GoogleAdWordsLinkedService from ._models_py3 import GoogleAdWordsObjectDataset + from ._models_py3 import GoogleAdWordsSource from ._models_py3 import GoogleBigQueryLinkedService from ._models_py3 import GoogleBigQueryObjectDataset + from ._models_py3 import GoogleBigQuerySource from ._models_py3 import GoogleCloudStorageLinkedService + from ._models_py3 import GoogleCloudStorageLocation + from ._models_py3 import GoogleCloudStorageReadSettings from ._models_py3 import GreenplumLinkedService + from ._models_py3 import GreenplumSource from ._models_py3 import GreenplumTableDataset from ._models_py3 import HBaseLinkedService from ._models_py3 import HBaseObjectDataset + from ._models_py3 import HBaseSource from ._models_py3 import HDInsightHiveActivity from ._models_py3 import HDInsightLinkedService from ._models_py3 import HDInsightMapReduceActivity @@ -165,20 +266,50 @@ from ._models_py3 import HDInsightSparkActivity from ._models_py3 import HDInsightStreamingActivity from ._models_py3 import HdfsLinkedService + from ._models_py3 import HdfsLocation + from ._models_py3 import HdfsReadSettings + from ._models_py3 import HdfsSource from ._models_py3 import HiveLinkedService from ._models_py3 import HiveObjectDataset + from ._models_py3 import HiveSource from ._models_py3 import HttpLinkedService + from ._models_py3 import HttpReadSettings + from ._models_py3 import HttpServerLocation + from ._models_py3 import HttpSource from ._models_py3 import HubspotLinkedService from ._models_py3 import HubspotObjectDataset + from ._models_py3 import HubspotSource from ._models_py3 import IfConditionActivity from ._models_py3 import ImpalaLinkedService from ._models_py3 import ImpalaObjectDataset + from ._models_py3 import ImpalaSource from ._models_py3 import InformixLinkedService + from ._models_py3 import InformixSink + from ._models_py3 import InformixSource from ._models_py3 import InformixTableDataset + from ._models_py3 import IntegrationRuntime + from ._models_py3 import IntegrationRuntimeComputeProperties + from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties + from ._models_py3 import IntegrationRuntimeDataFlowProperties + from ._models_py3 import IntegrationRuntimeDataProxyProperties + from ._models_py3 import IntegrationRuntimeListResponse from ._models_py3 import IntegrationRuntimeReference + from ._models_py3 import IntegrationRuntimeResource + from ._models_py3 import IntegrationRuntimeSsisCatalogInfo + from ._models_py3 import IntegrationRuntimeSsisProperties + from ._models_py3 import IntegrationRuntimeVNetProperties from ._models_py3 import JiraLinkedService from ._models_py3 import JiraObjectDataset + from ._models_py3 import JiraSource from ._models_py3 import 
JsonDataset + from ._models_py3 import JsonFormat + from ._models_py3 import JsonSink + from ._models_py3 import JsonSource + from ._models_py3 import JsonWriteSettings + from ._models_py3 import LibraryRequirements + from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization + from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization + from ._models_py3 import LinkedIntegrationRuntimeType from ._models_py3 import LinkedService from ._models_py3 import LinkedServiceDebugResource from ._models_py3 import LinkedServiceListResponse @@ -188,21 +319,34 @@ from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService from ._models_py3 import MagentoObjectDataset + from ._models_py3 import MagentoSource + from ._models_py3 import ManagedIdentity + from ._models_py3 import ManagedIntegrationRuntime from ._models_py3 import MappingDataFlow from ._models_py3 import MariaDBLinkedService + from ._models_py3 import MariaDBSource from ._models_py3 import MariaDBTableDataset from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset + from ._models_py3 import MarketoSource from ._models_py3 import MicrosoftAccessLinkedService + from ._models_py3 import MicrosoftAccessSink + from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset from ._models_py3 import MongoDbCollectionDataset + from ._models_py3 import MongoDbCursorMethodsProperties from ._models_py3 import MongoDbLinkedService + from ._models_py3 import MongoDbSource from ._models_py3 import MongoDbV2CollectionDataset from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger from ._models_py3 import MySqlLinkedService + from ._models_py3 import MySqlSource from ._models_py3 import MySqlTableDataset from ._models_py3 import NetezzaLinkedService + from ._models_py3 import NetezzaPartitionSettings + from ._models_py3 import NetezzaSource from ._models_py3 import NetezzaTableDataset from ._models_py3 import Notebook from ._models_py3 import NotebookCell @@ -215,21 +359,37 @@ from ._models_py3 import NotebookSessionProperties from ._models_py3 import ODataLinkedService from ._models_py3 import ODataResourceDataset + from ._models_py3 import ODataSource from ._models_py3 import OdbcLinkedService + from ._models_py3 import OdbcSink + from ._models_py3 import OdbcSource from ._models_py3 import OdbcTableDataset from ._models_py3 import Office365Dataset from ._models_py3 import Office365LinkedService + from ._models_py3 import Office365Source from ._models_py3 import OracleLinkedService + from ._models_py3 import OraclePartitionSettings from ._models_py3 import OracleServiceCloudLinkedService from ._models_py3 import OracleServiceCloudObjectDataset + from ._models_py3 import OracleServiceCloudSource + from ._models_py3 import OracleSink + from ._models_py3 import OracleSource from ._models_py3 import OracleTableDataset from ._models_py3 import OrcDataset + from ._models_py3 import OrcFormat + from ._models_py3 import OrcSink + from ._models_py3 import OrcSource from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset + from ._models_py3 import ParquetFormat + from ._models_py3 import ParquetSink + from ._models_py3 import ParquetSource from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset + from ._models_py3 import PaypalSource from ._models_py3 import PhoenixLinkedService from ._models_py3 import 
PhoenixObjectDataset + from ._models_py3 import PhoenixSource from ._models_py3 import PipelineFolder from ._models_py3 import PipelineListResponse from ._models_py3 import PipelineReference @@ -237,14 +397,26 @@ from ._models_py3 import PipelineRun from ._models_py3 import PipelineRunInvokedBy from ._models_py3 import PipelineRunsQueryResponse + from ._models_py3 import PolybaseSettings from ._models_py3 import PostgreSqlLinkedService + from ._models_py3 import PostgreSqlSource from ._models_py3 import PostgreSqlTableDataset from ._models_py3 import PrestoLinkedService from ._models_py3 import PrestoObjectDataset + from ._models_py3 import PrestoSource + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointConnection + from ._models_py3 import PrivateLinkServiceConnectionState + from ._models_py3 import ProxyResource from ._models_py3 import QueryDataFlowDebugSessionsResponse from ._models_py3 import QuickBooksLinkedService from ._models_py3 import QuickBooksObjectDataset + from ._models_py3 import QuickBooksSource + from ._models_py3 import RecurrenceSchedule + from ._models_py3 import RecurrenceScheduleOccurrence from ._models_py3 import RedirectIncompatibleRowSettings + from ._models_py3 import RedshiftUnloadSettings + from ._models_py3 import RelationalSource from ._models_py3 import RelationalTableDataset from ._models_py3 import RerunTriggerListResponse from ._models_py3 import RerunTriggerResource @@ -253,8 +425,11 @@ from ._models_py3 import Resource from ._models_py3 import ResponsysLinkedService from ._models_py3 import ResponsysObjectDataset + from ._models_py3 import ResponsysSource from ._models_py3 import RestResourceDataset from ._models_py3 import RestServiceLinkedService + from ._models_py3 import RestSource + from ._models_py3 import RetryPolicy from ._models_py3 import RunFilterParameters from ._models_py3 import RunQueryFilter from ._models_py3 import RunQueryOrderBy @@ -268,30 +443,54 @@ from ._models_py3 import SalesforceLinkedService from ._models_py3 import SalesforceMarketingCloudLinkedService from ._models_py3 import SalesforceMarketingCloudObjectDataset + from ._models_py3 import SalesforceMarketingCloudSource from ._models_py3 import SalesforceObjectDataset from ._models_py3 import SalesforceServiceCloudLinkedService from ._models_py3 import SalesforceServiceCloudObjectDataset + from ._models_py3 import SalesforceServiceCloudSink + from ._models_py3 import SalesforceServiceCloudSource + from ._models_py3 import SalesforceSink + from ._models_py3 import SalesforceSource from ._models_py3 import SapBWLinkedService from ._models_py3 import SapBwCubeDataset + from ._models_py3 import SapBwSource from ._models_py3 import SapCloudForCustomerLinkedService from ._models_py3 import SapCloudForCustomerResourceDataset + from ._models_py3 import SapCloudForCustomerSink + from ._models_py3 import SapCloudForCustomerSource from ._models_py3 import SapEccLinkedService from ._models_py3 import SapEccResourceDataset + from ._models_py3 import SapEccSource from ._models_py3 import SapHanaLinkedService + from ._models_py3 import SapHanaPartitionSettings + from ._models_py3 import SapHanaSource from ._models_py3 import SapHanaTableDataset from ._models_py3 import SapOpenHubLinkedService + from ._models_py3 import SapOpenHubSource from ._models_py3 import SapOpenHubTableDataset from ._models_py3 import SapTableLinkedService + from ._models_py3 import SapTablePartitionSettings from ._models_py3 import SapTableResourceDataset + from ._models_py3 import 
SapTableSource + from ._models_py3 import ScheduleTrigger + from ._models_py3 import ScheduleTriggerRecurrence from ._models_py3 import ScriptAction from ._models_py3 import SecretBase from ._models_py3 import SecureString + from ._models_py3 import SelfDependencyTumblingWindowTriggerReference + from ._models_py3 import SelfHostedIntegrationRuntime from ._models_py3 import ServiceNowLinkedService from ._models_py3 import ServiceNowObjectDataset + from ._models_py3 import ServiceNowSource from ._models_py3 import SetVariableActivity + from ._models_py3 import SftpLocation + from ._models_py3 import SftpReadSettings from ._models_py3 import SftpServerLinkedService + from ._models_py3 import SftpWriteSettings from ._models_py3 import ShopifyLinkedService from ._models_py3 import ShopifyObjectDataset + from ._models_py3 import ShopifySource + from ._models_py3 import Sku from ._models_py3 import SparkBatchJob from ._models_py3 import SparkBatchJobState from ._models_py3 import SparkJobDefinition @@ -304,45 +503,78 @@ from ._models_py3 import SparkScheduler from ._models_py3 import SparkServiceError from ._models_py3 import SparkServicePlugin + from ._models_py3 import SparkSource from ._models_py3 import SqlConnection + from ._models_py3 import SqlDWSink + from ._models_py3 import SqlDWSource + from ._models_py3 import SqlMISink + from ._models_py3 import SqlMISource + from ._models_py3 import SqlPool + from ._models_py3 import SqlPoolInfoListResult + from ._models_py3 import SqlPoolReference + from ._models_py3 import SqlPoolStoredProcedureActivity from ._models_py3 import SqlScript from ._models_py3 import SqlScriptContent from ._models_py3 import SqlScriptMetadata from ._models_py3 import SqlScriptResource from ._models_py3 import SqlScriptsListResponse from ._models_py3 import SqlServerLinkedService + from ._models_py3 import SqlServerSink + from ._models_py3 import SqlServerSource from ._models_py3 import SqlServerStoredProcedureActivity from ._models_py3 import SqlServerTableDataset + from ._models_py3 import SqlSink + from ._models_py3 import SqlSource from ._models_py3 import SquareLinkedService from ._models_py3 import SquareObjectDataset + from ._models_py3 import SquareSource from ._models_py3 import SsisObjectMetadataStatusResponse from ._models_py3 import StagingSettings from ._models_py3 import StartDataFlowDebugSessionRequest from ._models_py3 import StartDataFlowDebugSessionResponse + from ._models_py3 import StoreReadSettings + from ._models_py3 import StoreWriteSettings from ._models_py3 import StoredProcedureParameter from ._models_py3 import SubResource from ._models_py3 import SubResourceDebugResource from ._models_py3 import SwitchActivity from ._models_py3 import SwitchCase from ._models_py3 import SybaseLinkedService + from ._models_py3 import SybaseSource from ._models_py3 import SybaseTableDataset + from ._models_py3 import SynapseNotebookActivity + from ._models_py3 import SynapseNotebookReference + from ._models_py3 import SynapseSparkJobDefinitionActivity + from ._models_py3 import SynapseSparkJobReference + from ._models_py3 import TabularSource + from ._models_py3 import TabularTranslator from ._models_py3 import TeradataLinkedService + from ._models_py3 import TeradataPartitionSettings + from ._models_py3 import TeradataSource from ._models_py3 import TeradataTableDataset + from ._models_py3 import TextFormat + from ._models_py3 import TrackedResource from ._models_py3 import Transformation from ._models_py3 import Trigger from ._models_py3 import 
TriggerDependencyProvisioningStatus + from ._models_py3 import TriggerDependencyReference from ._models_py3 import TriggerListResponse from ._models_py3 import TriggerPipelineReference + from ._models_py3 import TriggerReference from ._models_py3 import TriggerResource from ._models_py3 import TriggerRun from ._models_py3 import TriggerRunsQueryResponse from ._models_py3 import TriggerSubscriptionOperationStatus + from ._models_py3 import TumblingWindowTrigger + from ._models_py3 import TumblingWindowTriggerDependencyReference from ._models_py3 import UntilActivity from ._models_py3 import UserProperty from ._models_py3 import ValidationActivity from ._models_py3 import VariableSpecification from ._models_py3 import VerticaLinkedService + from ._models_py3 import VerticaSource from ._models_py3 import VerticaTableDataset + from ._models_py3 import VirtualNetworkProfile from ._models_py3 import WaitActivity from ._models_py3 import WebActivity from ._models_py3 import WebActivityAuthentication @@ -352,14 +584,17 @@ from ._models_py3 import WebHookActivity from ._models_py3 import WebLinkedService from ._models_py3 import WebLinkedServiceTypeProperties + from ._models_py3 import WebSource from ._models_py3 import WebTableDataset from ._models_py3 import Workspace from ._models_py3 import WorkspaceIdentity from ._models_py3 import WorkspaceUpdateParameters from ._models_py3 import XeroLinkedService from ._models_py3 import XeroObjectDataset + from ._models_py3 import XeroSource from ._models_py3 import ZohoLinkedService from ._models_py3 import ZohoObjectDataset + from ._models_py3 import ZohoSource except (SyntaxError, ImportError): from ._models import Activity # type: ignore from ._models import ActivityDependency # type: ignore @@ -369,21 +604,49 @@ from ._models import AddDataFlowToDebugSessionResponse # type: ignore from ._models import AmazonMWSLinkedService # type: ignore from ._models import AmazonMWSObjectDataset # type: ignore + from ._models import AmazonMWSSource # type: ignore from ._models import AmazonRedshiftLinkedService # type: ignore + from ._models import AmazonRedshiftSource # type: ignore from ._models import AmazonRedshiftTableDataset # type: ignore from ._models import AmazonS3LinkedService # type: ignore + from ._models import AmazonS3Location # type: ignore + from ._models import AmazonS3ReadSettings # type: ignore from ._models import AppendVariableActivity # type: ignore + from ._models import AutoPauseProperties # type: ignore + from ._models import AutoScaleProperties # type: ignore from ._models import AvroDataset # type: ignore + from ._models import AvroFormat # type: ignore + from ._models import AvroSink # type: ignore + from ._models import AvroSource # type: ignore + from ._models import AvroWriteSettings # type: ignore from ._models import AzureBatchLinkedService # type: ignore from ._models import AzureBlobFSLinkedService # type: ignore + from ._models import AzureBlobFSLocation # type: ignore + from ._models import AzureBlobFSReadSettings # type: ignore + from ._models import AzureBlobFSSink # type: ignore + from ._models import AzureBlobFSSource # type: ignore + from ._models import AzureBlobFSWriteSettings # type: ignore from ._models import AzureBlobStorageLinkedService # type: ignore + from ._models import AzureBlobStorageLocation # type: ignore + from ._models import AzureBlobStorageReadSettings # type: ignore + from ._models import AzureBlobStorageWriteSettings # type: ignore from ._models import AzureDataExplorerCommandActivity # type: ignore 
from ._models import AzureDataExplorerLinkedService # type: ignore + from ._models import AzureDataExplorerSink # type: ignore + from ._models import AzureDataExplorerSource # type: ignore from ._models import AzureDataExplorerTableDataset # type: ignore from ._models import AzureDataLakeAnalyticsLinkedService # type: ignore from ._models import AzureDataLakeStoreLinkedService # type: ignore + from ._models import AzureDataLakeStoreLocation # type: ignore + from ._models import AzureDataLakeStoreReadSettings # type: ignore + from ._models import AzureDataLakeStoreSink # type: ignore + from ._models import AzureDataLakeStoreSource # type: ignore + from ._models import AzureDataLakeStoreWriteSettings # type: ignore from ._models import AzureDatabricksLinkedService # type: ignore + from ._models import AzureEntityResource # type: ignore from ._models import AzureFileStorageLinkedService # type: ignore + from ._models import AzureFileStorageLocation # type: ignore + from ._models import AzureFileStorageReadSettings # type: ignore from ._models import AzureFunctionActivity # type: ignore from ._models import AzureFunctionLinkedService # type: ignore from ._models import AzureKeyVaultLinkedService # type: ignore @@ -395,40 +658,70 @@ from ._models import AzureMLUpdateResourceActivity # type: ignore from ._models import AzureMLWebServiceFile # type: ignore from ._models import AzureMariaDBLinkedService # type: ignore + from ._models import AzureMariaDBSource # type: ignore from ._models import AzureMariaDBTableDataset # type: ignore from ._models import AzureMySqlLinkedService # type: ignore + from ._models import AzureMySqlSink # type: ignore + from ._models import AzureMySqlSource # type: ignore from ._models import AzureMySqlTableDataset # type: ignore from ._models import AzurePostgreSqlLinkedService # type: ignore + from ._models import AzurePostgreSqlSink # type: ignore + from ._models import AzurePostgreSqlSource # type: ignore from ._models import AzurePostgreSqlTableDataset # type: ignore + from ._models import AzureQueueSink # type: ignore from ._models import AzureSearchIndexDataset # type: ignore + from ._models import AzureSearchIndexSink # type: ignore from ._models import AzureSearchLinkedService # type: ignore from ._models import AzureSqlDWLinkedService # type: ignore from ._models import AzureSqlDWTableDataset # type: ignore from ._models import AzureSqlDatabaseLinkedService # type: ignore from ._models import AzureSqlMILinkedService # type: ignore from ._models import AzureSqlMITableDataset # type: ignore + from ._models import AzureSqlSink # type: ignore + from ._models import AzureSqlSource # type: ignore from ._models import AzureSqlTableDataset # type: ignore from ._models import AzureStorageLinkedService # type: ignore from ._models import AzureTableDataset # type: ignore + from ._models import AzureTableSink # type: ignore + from ._models import AzureTableSource # type: ignore from ._models import AzureTableStorageLinkedService # type: ignore from ._models import BigDataPoolReference # type: ignore + from ._models import BigDataPoolResourceInfo # type: ignore + from ._models import BigDataPoolResourceInfoListResult # type: ignore from ._models import BinaryDataset # type: ignore + from ._models import BinarySink # type: ignore + from ._models import BinarySource # type: ignore + from ._models import BlobEventsTrigger # type: ignore + from ._models import BlobSink # type: ignore + from ._models import BlobSource # type: ignore + from ._models import BlobTrigger # type: 
ignore from ._models import CassandraLinkedService # type: ignore + from ._models import CassandraSource # type: ignore from ._models import CassandraTableDataset # type: ignore + from ._models import ChainingTrigger # type: ignore from ._models import CloudError # type: ignore from ._models import CommonDataServiceForAppsEntityDataset # type: ignore from ._models import CommonDataServiceForAppsLinkedService # type: ignore + from ._models import CommonDataServiceForAppsSink # type: ignore + from ._models import CommonDataServiceForAppsSource # type: ignore from ._models import ConcurLinkedService # type: ignore from ._models import ConcurObjectDataset # type: ignore + from ._models import ConcurSource # type: ignore from ._models import ControlActivity # type: ignore from ._models import CopyActivity # type: ignore from ._models import CopySink # type: ignore from ._models import CopySource # type: ignore + from ._models import CopyTranslator # type: ignore from ._models import CosmosDbLinkedService # type: ignore from ._models import CosmosDbMongoDbApiCollectionDataset # type: ignore from ._models import CosmosDbMongoDbApiLinkedService # type: ignore + from ._models import CosmosDbMongoDbApiSink # type: ignore + from ._models import CosmosDbMongoDbApiSource # type: ignore from ._models import CosmosDbSqlApiCollectionDataset # type: ignore + from ._models import CosmosDbSqlApiSink # type: ignore + from ._models import CosmosDbSqlApiSource # type: ignore from ._models import CouchbaseLinkedService # type: ignore + from ._models import CouchbaseSource # type: ignore from ._models import CouchbaseTableDataset # type: ignore from ._models import CreateDataFlowDebugSessionRequest # type: ignore from ._models import CreateDataFlowDebugSessionResponse # type: ignore @@ -436,6 +729,10 @@ from ._models import CustomActivity # type: ignore from ._models import CustomActivityReferenceObject # type: ignore from ._models import CustomDataSourceLinkedService # type: ignore + from ._models import CustomDataset # type: ignore + from ._models import CustomSetupBase # type: ignore + from ._models import DWCopyCommandDefaultValue # type: ignore + from ._models import DWCopyCommandSettings # type: ignore from ._models import DataFlow # type: ignore from ._models import DataFlowDebugCommandRequest # type: ignore from ._models import DataFlowDebugCommandResponse # type: ignore @@ -456,12 +753,14 @@ from ._models import DataFlowSourceSetting # type: ignore from ._models import DataFlowStagingInfo # type: ignore from ._models import DataLakeAnalyticsUSQLActivity # type: ignore + from ._models import DataLakeStorageAccountDetails # type: ignore from ._models import DatabricksNotebookActivity # type: ignore from ._models import DatabricksSparkJarActivity # type: ignore from ._models import DatabricksSparkPythonActivity # type: ignore from ._models import Dataset # type: ignore from ._models import DatasetBZip2Compression # type: ignore from ._models import DatasetCompression # type: ignore + from ._models import DatasetDataElement # type: ignore from ._models import DatasetDebugResource # type: ignore from ._models import DatasetDeflateCompression # type: ignore from ._models import DatasetFolder # type: ignore @@ -470,23 +769,45 @@ from ._models import DatasetLocation # type: ignore from ._models import DatasetReference # type: ignore from ._models import DatasetResource # type: ignore + from ._models import DatasetSchemaDataElement # type: ignore + from ._models import DatasetStorageFormat # type: ignore from 
._models import DatasetZipDeflateCompression # type: ignore from ._models import Db2LinkedService # type: ignore + from ._models import Db2Source # type: ignore from ._models import Db2TableDataset # type: ignore from ._models import DeleteActivity # type: ignore from ._models import DeleteDataFlowDebugSessionRequest # type: ignore from ._models import DelimitedTextDataset # type: ignore + from ._models import DelimitedTextReadSettings # type: ignore + from ._models import DelimitedTextSink # type: ignore + from ._models import DelimitedTextSource # type: ignore + from ._models import DelimitedTextWriteSettings # type: ignore + from ._models import DependencyReference # type: ignore + from ._models import DistcpSettings # type: ignore from ._models import DocumentDbCollectionDataset # type: ignore + from ._models import DocumentDbCollectionSink # type: ignore + from ._models import DocumentDbCollectionSource # type: ignore from ._models import DrillLinkedService # type: ignore + from ._models import DrillSource # type: ignore from ._models import DrillTableDataset # type: ignore from ._models import DynamicsAXLinkedService # type: ignore from ._models import DynamicsAXResourceDataset # type: ignore + from ._models import DynamicsAXSource # type: ignore from ._models import DynamicsCrmEntityDataset # type: ignore from ._models import DynamicsCrmLinkedService # type: ignore + from ._models import DynamicsCrmSink # type: ignore + from ._models import DynamicsCrmSource # type: ignore from ._models import DynamicsEntityDataset # type: ignore from ._models import DynamicsLinkedService # type: ignore + from ._models import DynamicsSink # type: ignore + from ._models import DynamicsSource # type: ignore from ._models import EloquaLinkedService # type: ignore from ._models import EloquaObjectDataset # type: ignore + from ._models import EloquaSource # type: ignore + from ._models import EntityReference # type: ignore + from ._models import ErrorAdditionalInfo # type: ignore + from ._models import ErrorContract # type: ignore + from ._models import ErrorResponse # type: ignore from ._models import EvaluateDataFlowExpressionRequest # type: ignore from ._models import ExecuteDataFlowActivity # type: ignore from ._models import ExecuteDataFlowActivityTypePropertiesCompute # type: ignore @@ -497,20 +818,35 @@ from ._models import ExposureControlResponse # type: ignore from ._models import Expression # type: ignore from ._models import FileServerLinkedService # type: ignore + from ._models import FileServerLocation # type: ignore + from ._models import FileServerReadSettings # type: ignore + from ._models import FileServerWriteSettings # type: ignore + from ._models import FileSystemSink # type: ignore + from ._models import FileSystemSource # type: ignore from ._models import FilterActivity # type: ignore from ._models import ForEachActivity # type: ignore + from ._models import FormatReadSettings # type: ignore + from ._models import FormatWriteSettings # type: ignore + from ._models import FtpReadSettings # type: ignore from ._models import FtpServerLinkedService # type: ignore + from ._models import FtpServerLocation # type: ignore from ._models import GetMetadataActivity # type: ignore from ._models import GetSsisObjectMetadataRequest # type: ignore from ._models import GoogleAdWordsLinkedService # type: ignore from ._models import GoogleAdWordsObjectDataset # type: ignore + from ._models import GoogleAdWordsSource # type: ignore from ._models import GoogleBigQueryLinkedService # type: ignore from 
._models import GoogleBigQueryObjectDataset # type: ignore + from ._models import GoogleBigQuerySource # type: ignore from ._models import GoogleCloudStorageLinkedService # type: ignore + from ._models import GoogleCloudStorageLocation # type: ignore + from ._models import GoogleCloudStorageReadSettings # type: ignore from ._models import GreenplumLinkedService # type: ignore + from ._models import GreenplumSource # type: ignore from ._models import GreenplumTableDataset # type: ignore from ._models import HBaseLinkedService # type: ignore from ._models import HBaseObjectDataset # type: ignore + from ._models import HBaseSource # type: ignore from ._models import HDInsightHiveActivity # type: ignore from ._models import HDInsightLinkedService # type: ignore from ._models import HDInsightMapReduceActivity # type: ignore @@ -519,20 +855,50 @@ from ._models import HDInsightSparkActivity # type: ignore from ._models import HDInsightStreamingActivity # type: ignore from ._models import HdfsLinkedService # type: ignore + from ._models import HdfsLocation # type: ignore + from ._models import HdfsReadSettings # type: ignore + from ._models import HdfsSource # type: ignore from ._models import HiveLinkedService # type: ignore from ._models import HiveObjectDataset # type: ignore + from ._models import HiveSource # type: ignore from ._models import HttpLinkedService # type: ignore + from ._models import HttpReadSettings # type: ignore + from ._models import HttpServerLocation # type: ignore + from ._models import HttpSource # type: ignore from ._models import HubspotLinkedService # type: ignore from ._models import HubspotObjectDataset # type: ignore + from ._models import HubspotSource # type: ignore from ._models import IfConditionActivity # type: ignore from ._models import ImpalaLinkedService # type: ignore from ._models import ImpalaObjectDataset # type: ignore + from ._models import ImpalaSource # type: ignore from ._models import InformixLinkedService # type: ignore + from ._models import InformixSink # type: ignore + from ._models import InformixSource # type: ignore from ._models import InformixTableDataset # type: ignore + from ._models import IntegrationRuntime # type: ignore + from ._models import IntegrationRuntimeComputeProperties # type: ignore + from ._models import IntegrationRuntimeCustomSetupScriptProperties # type: ignore + from ._models import IntegrationRuntimeDataFlowProperties # type: ignore + from ._models import IntegrationRuntimeDataProxyProperties # type: ignore + from ._models import IntegrationRuntimeListResponse # type: ignore from ._models import IntegrationRuntimeReference # type: ignore + from ._models import IntegrationRuntimeResource # type: ignore + from ._models import IntegrationRuntimeSsisCatalogInfo # type: ignore + from ._models import IntegrationRuntimeSsisProperties # type: ignore + from ._models import IntegrationRuntimeVNetProperties # type: ignore from ._models import JiraLinkedService # type: ignore from ._models import JiraObjectDataset # type: ignore + from ._models import JiraSource # type: ignore from ._models import JsonDataset # type: ignore + from ._models import JsonFormat # type: ignore + from ._models import JsonSink # type: ignore + from ._models import JsonSource # type: ignore + from ._models import JsonWriteSettings # type: ignore + from ._models import LibraryRequirements # type: ignore + from ._models import LinkedIntegrationRuntimeKeyAuthorization # type: ignore + from ._models import LinkedIntegrationRuntimeRbacAuthorization # type: 
ignore + from ._models import LinkedIntegrationRuntimeType # type: ignore from ._models import LinkedService # type: ignore from ._models import LinkedServiceDebugResource # type: ignore from ._models import LinkedServiceListResponse # type: ignore @@ -542,21 +908,34 @@ from ._models import LookupActivity # type: ignore from ._models import MagentoLinkedService # type: ignore from ._models import MagentoObjectDataset # type: ignore + from ._models import MagentoSource # type: ignore + from ._models import ManagedIdentity # type: ignore + from ._models import ManagedIntegrationRuntime # type: ignore from ._models import MappingDataFlow # type: ignore from ._models import MariaDBLinkedService # type: ignore + from ._models import MariaDBSource # type: ignore from ._models import MariaDBTableDataset # type: ignore from ._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore + from ._models import MarketoSource # type: ignore from ._models import MicrosoftAccessLinkedService # type: ignore + from ._models import MicrosoftAccessSink # type: ignore + from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore from ._models import MongoDbCollectionDataset # type: ignore + from ._models import MongoDbCursorMethodsProperties # type: ignore from ._models import MongoDbLinkedService # type: ignore + from ._models import MongoDbSource # type: ignore from ._models import MongoDbV2CollectionDataset # type: ignore from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore from ._models import MySqlLinkedService # type: ignore + from ._models import MySqlSource # type: ignore from ._models import MySqlTableDataset # type: ignore from ._models import NetezzaLinkedService # type: ignore + from ._models import NetezzaPartitionSettings # type: ignore + from ._models import NetezzaSource # type: ignore from ._models import NetezzaTableDataset # type: ignore from ._models import Notebook # type: ignore from ._models import NotebookCell # type: ignore @@ -569,21 +948,37 @@ from ._models import NotebookSessionProperties # type: ignore from ._models import ODataLinkedService # type: ignore from ._models import ODataResourceDataset # type: ignore + from ._models import ODataSource # type: ignore from ._models import OdbcLinkedService # type: ignore + from ._models import OdbcSink # type: ignore + from ._models import OdbcSource # type: ignore from ._models import OdbcTableDataset # type: ignore from ._models import Office365Dataset # type: ignore from ._models import Office365LinkedService # type: ignore + from ._models import Office365Source # type: ignore from ._models import OracleLinkedService # type: ignore + from ._models import OraclePartitionSettings # type: ignore from ._models import OracleServiceCloudLinkedService # type: ignore from ._models import OracleServiceCloudObjectDataset # type: ignore + from ._models import OracleServiceCloudSource # type: ignore + from ._models import OracleSink # type: ignore + from ._models import OracleSource # type: ignore from ._models import OracleTableDataset # type: ignore from ._models import OrcDataset # type: ignore + from ._models import OrcFormat # type: ignore + from ._models import OrcSink # type: ignore + from ._models import OrcSource # type: ignore from ._models import ParameterSpecification # type: ignore from ._models import 
ParquetDataset # type: ignore + from ._models import ParquetFormat # type: ignore + from ._models import ParquetSink # type: ignore + from ._models import ParquetSource # type: ignore from ._models import PaypalLinkedService # type: ignore from ._models import PaypalObjectDataset # type: ignore + from ._models import PaypalSource # type: ignore from ._models import PhoenixLinkedService # type: ignore from ._models import PhoenixObjectDataset # type: ignore + from ._models import PhoenixSource # type: ignore from ._models import PipelineFolder # type: ignore from ._models import PipelineListResponse # type: ignore from ._models import PipelineReference # type: ignore @@ -591,14 +986,26 @@ from ._models import PipelineRun # type: ignore from ._models import PipelineRunInvokedBy # type: ignore from ._models import PipelineRunsQueryResponse # type: ignore + from ._models import PolybaseSettings # type: ignore from ._models import PostgreSqlLinkedService # type: ignore + from ._models import PostgreSqlSource # type: ignore from ._models import PostgreSqlTableDataset # type: ignore from ._models import PrestoLinkedService # type: ignore from ._models import PrestoObjectDataset # type: ignore + from ._models import PrestoSource # type: ignore + from ._models import PrivateEndpoint # type: ignore + from ._models import PrivateEndpointConnection # type: ignore + from ._models import PrivateLinkServiceConnectionState # type: ignore + from ._models import ProxyResource # type: ignore from ._models import QueryDataFlowDebugSessionsResponse # type: ignore from ._models import QuickBooksLinkedService # type: ignore from ._models import QuickBooksObjectDataset # type: ignore + from ._models import QuickBooksSource # type: ignore + from ._models import RecurrenceSchedule # type: ignore + from ._models import RecurrenceScheduleOccurrence # type: ignore from ._models import RedirectIncompatibleRowSettings # type: ignore + from ._models import RedshiftUnloadSettings # type: ignore + from ._models import RelationalSource # type: ignore from ._models import RelationalTableDataset # type: ignore from ._models import RerunTriggerListResponse # type: ignore from ._models import RerunTriggerResource # type: ignore @@ -607,8 +1014,11 @@ from ._models import Resource # type: ignore from ._models import ResponsysLinkedService # type: ignore from ._models import ResponsysObjectDataset # type: ignore + from ._models import ResponsysSource # type: ignore from ._models import RestResourceDataset # type: ignore from ._models import RestServiceLinkedService # type: ignore + from ._models import RestSource # type: ignore + from ._models import RetryPolicy # type: ignore from ._models import RunFilterParameters # type: ignore from ._models import RunQueryFilter # type: ignore from ._models import RunQueryOrderBy # type: ignore @@ -622,30 +1032,54 @@ from ._models import SalesforceLinkedService # type: ignore from ._models import SalesforceMarketingCloudLinkedService # type: ignore from ._models import SalesforceMarketingCloudObjectDataset # type: ignore + from ._models import SalesforceMarketingCloudSource # type: ignore from ._models import SalesforceObjectDataset # type: ignore from ._models import SalesforceServiceCloudLinkedService # type: ignore from ._models import SalesforceServiceCloudObjectDataset # type: ignore + from ._models import SalesforceServiceCloudSink # type: ignore + from ._models import SalesforceServiceCloudSource # type: ignore + from ._models import SalesforceSink # type: ignore + from ._models import 
SalesforceSource # type: ignore from ._models import SapBWLinkedService # type: ignore from ._models import SapBwCubeDataset # type: ignore + from ._models import SapBwSource # type: ignore from ._models import SapCloudForCustomerLinkedService # type: ignore from ._models import SapCloudForCustomerResourceDataset # type: ignore + from ._models import SapCloudForCustomerSink # type: ignore + from ._models import SapCloudForCustomerSource # type: ignore from ._models import SapEccLinkedService # type: ignore from ._models import SapEccResourceDataset # type: ignore + from ._models import SapEccSource # type: ignore from ._models import SapHanaLinkedService # type: ignore + from ._models import SapHanaPartitionSettings # type: ignore + from ._models import SapHanaSource # type: ignore from ._models import SapHanaTableDataset # type: ignore from ._models import SapOpenHubLinkedService # type: ignore + from ._models import SapOpenHubSource # type: ignore from ._models import SapOpenHubTableDataset # type: ignore from ._models import SapTableLinkedService # type: ignore + from ._models import SapTablePartitionSettings # type: ignore from ._models import SapTableResourceDataset # type: ignore + from ._models import SapTableSource # type: ignore + from ._models import ScheduleTrigger # type: ignore + from ._models import ScheduleTriggerRecurrence # type: ignore from ._models import ScriptAction # type: ignore from ._models import SecretBase # type: ignore from ._models import SecureString # type: ignore + from ._models import SelfDependencyTumblingWindowTriggerReference # type: ignore + from ._models import SelfHostedIntegrationRuntime # type: ignore from ._models import ServiceNowLinkedService # type: ignore from ._models import ServiceNowObjectDataset # type: ignore + from ._models import ServiceNowSource # type: ignore from ._models import SetVariableActivity # type: ignore + from ._models import SftpLocation # type: ignore + from ._models import SftpReadSettings # type: ignore from ._models import SftpServerLinkedService # type: ignore + from ._models import SftpWriteSettings # type: ignore from ._models import ShopifyLinkedService # type: ignore from ._models import ShopifyObjectDataset # type: ignore + from ._models import ShopifySource # type: ignore + from ._models import Sku # type: ignore from ._models import SparkBatchJob # type: ignore from ._models import SparkBatchJobState # type: ignore from ._models import SparkJobDefinition # type: ignore @@ -658,45 +1092,78 @@ from ._models import SparkScheduler # type: ignore from ._models import SparkServiceError # type: ignore from ._models import SparkServicePlugin # type: ignore + from ._models import SparkSource # type: ignore from ._models import SqlConnection # type: ignore + from ._models import SqlDWSink # type: ignore + from ._models import SqlDWSource # type: ignore + from ._models import SqlMISink # type: ignore + from ._models import SqlMISource # type: ignore + from ._models import SqlPool # type: ignore + from ._models import SqlPoolInfoListResult # type: ignore + from ._models import SqlPoolReference # type: ignore + from ._models import SqlPoolStoredProcedureActivity # type: ignore from ._models import SqlScript # type: ignore from ._models import SqlScriptContent # type: ignore from ._models import SqlScriptMetadata # type: ignore from ._models import SqlScriptResource # type: ignore from ._models import SqlScriptsListResponse # type: ignore from ._models import SqlServerLinkedService # type: ignore + from ._models import 
SqlServerSink # type: ignore + from ._models import SqlServerSource # type: ignore from ._models import SqlServerStoredProcedureActivity # type: ignore from ._models import SqlServerTableDataset # type: ignore + from ._models import SqlSink # type: ignore + from ._models import SqlSource # type: ignore from ._models import SquareLinkedService # type: ignore from ._models import SquareObjectDataset # type: ignore + from ._models import SquareSource # type: ignore from ._models import SsisObjectMetadataStatusResponse # type: ignore from ._models import StagingSettings # type: ignore from ._models import StartDataFlowDebugSessionRequest # type: ignore from ._models import StartDataFlowDebugSessionResponse # type: ignore + from ._models import StoreReadSettings # type: ignore + from ._models import StoreWriteSettings # type: ignore from ._models import StoredProcedureParameter # type: ignore from ._models import SubResource # type: ignore from ._models import SubResourceDebugResource # type: ignore from ._models import SwitchActivity # type: ignore from ._models import SwitchCase # type: ignore from ._models import SybaseLinkedService # type: ignore + from ._models import SybaseSource # type: ignore from ._models import SybaseTableDataset # type: ignore + from ._models import SynapseNotebookActivity # type: ignore + from ._models import SynapseNotebookReference # type: ignore + from ._models import SynapseSparkJobDefinitionActivity # type: ignore + from ._models import SynapseSparkJobReference # type: ignore + from ._models import TabularSource # type: ignore + from ._models import TabularTranslator # type: ignore from ._models import TeradataLinkedService # type: ignore + from ._models import TeradataPartitionSettings # type: ignore + from ._models import TeradataSource # type: ignore from ._models import TeradataTableDataset # type: ignore + from ._models import TextFormat # type: ignore + from ._models import TrackedResource # type: ignore from ._models import Transformation # type: ignore from ._models import Trigger # type: ignore from ._models import TriggerDependencyProvisioningStatus # type: ignore + from ._models import TriggerDependencyReference # type: ignore from ._models import TriggerListResponse # type: ignore from ._models import TriggerPipelineReference # type: ignore + from ._models import TriggerReference # type: ignore from ._models import TriggerResource # type: ignore from ._models import TriggerRun # type: ignore from ._models import TriggerRunsQueryResponse # type: ignore from ._models import TriggerSubscriptionOperationStatus # type: ignore + from ._models import TumblingWindowTrigger # type: ignore + from ._models import TumblingWindowTriggerDependencyReference # type: ignore from ._models import UntilActivity # type: ignore from ._models import UserProperty # type: ignore from ._models import ValidationActivity # type: ignore from ._models import VariableSpecification # type: ignore from ._models import VerticaLinkedService # type: ignore + from ._models import VerticaSource # type: ignore from ._models import VerticaTableDataset # type: ignore + from ._models import VirtualNetworkProfile # type: ignore from ._models import WaitActivity # type: ignore from ._models import WebActivity # type: ignore from ._models import WebActivityAuthentication # type: ignore @@ -706,27 +1173,41 @@ from ._models import WebHookActivity # type: ignore from ._models import WebLinkedService # type: ignore from ._models import WebLinkedServiceTypeProperties # type: ignore + from ._models 
import WebSource # type: ignore from ._models import WebTableDataset # type: ignore from ._models import Workspace # type: ignore from ._models import WorkspaceIdentity # type: ignore from ._models import WorkspaceUpdateParameters # type: ignore from ._models import XeroLinkedService # type: ignore from ._models import XeroObjectDataset # type: ignore + from ._models import XeroSource # type: ignore from ._models import ZohoLinkedService # type: ignore from ._models import ZohoObjectDataset # type: ignore + from ._models import ZohoSource # type: ignore from ._artifacts_client_enums import ( AvroCompressionCodec, AzureFunctionActivityMethod, + AzureSearchIndexWriteBehaviorType, + BigDataPoolReferenceType, + BlobEventTypes, + CassandraSourceReadConsistencyLevels, CellOutputType, + CopyBehaviorType, DataFlowComputeType, + DataFlowReferenceType, DatasetCompressionLevel, + DatasetReferenceType, + DayOfWeek, + Db2AuthenticationType, DelimitedTextCompressionCodec, DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, DynamicsServicePrincipalCredentialType, + DynamicsSinkWriteBehavior, EventSubscriptionStatus, + ExpressionType, FtpAuthenticationType, GoogleAdWordsAuthenticationType, GoogleBigQueryAuthenticationType, @@ -738,40 +1219,73 @@ HiveThriftTransportProtocol, HttpAuthenticationType, ImpalaAuthenticationType, + IntegrationRuntimeEdition, + IntegrationRuntimeEntityReferenceType, + IntegrationRuntimeLicenseType, + IntegrationRuntimeReferenceType, + IntegrationRuntimeSsisCatalogPricingTier, + IntegrationRuntimeState, + IntegrationRuntimeType, + JsonFormatFilePattern, + JsonWriteFilePattern, MongoDbAuthenticationType, + NetezzaPartitionOption, + NodeSize, + NodeSizeFamily, + NotebookReferenceType, ODataAadServicePrincipalCredentialType, ODataAuthenticationType, + OraclePartitionOption, OrcCompressionCodec, ParameterType, ParquetCompressionCodec, PhoenixAuthenticationType, + PipelineReferenceType, PluginCurrentState, + PolybaseSettingsRejectType, PrestoAuthenticationType, + PrivateLinkServiceConnectionStateStatus, + RecurrenceFrequency, + ResourceIdentityType, RestServiceAuthenticationType, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrder, RunQueryOrderByField, + SalesforceSinkWriteBehavior, + SalesforceSourceReadBehavior, + SapCloudForCustomerSinkWriteBehavior, SapHanaAuthenticationType, + SapHanaPartitionOption, + SapTablePartitionOption, SchedulerCurrentState, ServiceNowAuthenticationType, SftpAuthenticationType, SparkAuthenticationType, SparkBatchJobResultType, SparkErrorSource, + SparkJobReferenceType, SparkJobType, SparkServerType, SparkThriftTransportProtocol, SqlConnectionType, + SqlPoolReferenceType, + SqlScriptType, + SsisLogLocationType, SsisPackageLocationType, StoredProcedureParameterType, SybaseAuthenticationType, TeradataAuthenticationType, + TeradataPartitionOption, + TriggerReferenceType, TriggerRunStatus, TriggerRuntimeState, + TumblingWindowFrequency, + Type, VariableType, WebActivityMethod, WebAuthenticationType, + WebHookActivityMethod, ) __all__ = [ @@ -783,21 +1297,49 @@ 'AddDataFlowToDebugSessionResponse', 'AmazonMWSLinkedService', 'AmazonMWSObjectDataset', + 'AmazonMWSSource', 'AmazonRedshiftLinkedService', + 'AmazonRedshiftSource', 'AmazonRedshiftTableDataset', 'AmazonS3LinkedService', + 'AmazonS3Location', + 'AmazonS3ReadSettings', 'AppendVariableActivity', + 'AutoPauseProperties', + 'AutoScaleProperties', 'AvroDataset', + 'AvroFormat', + 'AvroSink', + 'AvroSource', + 'AvroWriteSettings', 'AzureBatchLinkedService', 
'AzureBlobFSLinkedService', + 'AzureBlobFSLocation', + 'AzureBlobFSReadSettings', + 'AzureBlobFSSink', + 'AzureBlobFSSource', + 'AzureBlobFSWriteSettings', 'AzureBlobStorageLinkedService', + 'AzureBlobStorageLocation', + 'AzureBlobStorageReadSettings', + 'AzureBlobStorageWriteSettings', 'AzureDataExplorerCommandActivity', 'AzureDataExplorerLinkedService', + 'AzureDataExplorerSink', + 'AzureDataExplorerSource', 'AzureDataExplorerTableDataset', 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStoreLinkedService', + 'AzureDataLakeStoreLocation', + 'AzureDataLakeStoreReadSettings', + 'AzureDataLakeStoreSink', + 'AzureDataLakeStoreSource', + 'AzureDataLakeStoreWriteSettings', 'AzureDatabricksLinkedService', + 'AzureEntityResource', 'AzureFileStorageLinkedService', + 'AzureFileStorageLocation', + 'AzureFileStorageReadSettings', 'AzureFunctionActivity', 'AzureFunctionLinkedService', 'AzureKeyVaultLinkedService', @@ -809,40 +1351,70 @@ 'AzureMLUpdateResourceActivity', 'AzureMLWebServiceFile', 'AzureMariaDBLinkedService', + 'AzureMariaDBSource', 'AzureMariaDBTableDataset', 'AzureMySqlLinkedService', + 'AzureMySqlSink', + 'AzureMySqlSource', 'AzureMySqlTableDataset', 'AzurePostgreSqlLinkedService', + 'AzurePostgreSqlSink', + 'AzurePostgreSqlSource', 'AzurePostgreSqlTableDataset', + 'AzureQueueSink', 'AzureSearchIndexDataset', + 'AzureSearchIndexSink', 'AzureSearchLinkedService', 'AzureSqlDWLinkedService', 'AzureSqlDWTableDataset', 'AzureSqlDatabaseLinkedService', 'AzureSqlMILinkedService', 'AzureSqlMITableDataset', + 'AzureSqlSink', + 'AzureSqlSource', 'AzureSqlTableDataset', 'AzureStorageLinkedService', 'AzureTableDataset', + 'AzureTableSink', + 'AzureTableSource', 'AzureTableStorageLinkedService', 'BigDataPoolReference', + 'BigDataPoolResourceInfo', + 'BigDataPoolResourceInfoListResult', 'BinaryDataset', + 'BinarySink', + 'BinarySource', + 'BlobEventsTrigger', + 'BlobSink', + 'BlobSource', + 'BlobTrigger', 'CassandraLinkedService', + 'CassandraSource', 'CassandraTableDataset', + 'ChainingTrigger', 'CloudError', 'CommonDataServiceForAppsEntityDataset', 'CommonDataServiceForAppsLinkedService', + 'CommonDataServiceForAppsSink', + 'CommonDataServiceForAppsSource', 'ConcurLinkedService', 'ConcurObjectDataset', + 'ConcurSource', 'ControlActivity', 'CopyActivity', 'CopySink', 'CopySource', + 'CopyTranslator', 'CosmosDbLinkedService', 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbMongoDbApiLinkedService', + 'CosmosDbMongoDbApiSink', + 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiCollectionDataset', + 'CosmosDbSqlApiSink', + 'CosmosDbSqlApiSource', 'CouchbaseLinkedService', + 'CouchbaseSource', 'CouchbaseTableDataset', 'CreateDataFlowDebugSessionRequest', 'CreateDataFlowDebugSessionResponse', @@ -850,6 +1422,10 @@ 'CustomActivity', 'CustomActivityReferenceObject', 'CustomDataSourceLinkedService', + 'CustomDataset', + 'CustomSetupBase', + 'DWCopyCommandDefaultValue', + 'DWCopyCommandSettings', 'DataFlow', 'DataFlowDebugCommandRequest', 'DataFlowDebugCommandResponse', @@ -870,12 +1446,14 @@ 'DataFlowSourceSetting', 'DataFlowStagingInfo', 'DataLakeAnalyticsUSQLActivity', + 'DataLakeStorageAccountDetails', 'DatabricksNotebookActivity', 'DatabricksSparkJarActivity', 'DatabricksSparkPythonActivity', 'Dataset', 'DatasetBZip2Compression', 'DatasetCompression', + 'DatasetDataElement', 'DatasetDebugResource', 'DatasetDeflateCompression', 'DatasetFolder', @@ -884,23 +1462,45 @@ 'DatasetLocation', 'DatasetReference', 'DatasetResource', + 'DatasetSchemaDataElement', + 'DatasetStorageFormat', 
'DatasetZipDeflateCompression', 'Db2LinkedService', + 'Db2Source', 'Db2TableDataset', 'DeleteActivity', 'DeleteDataFlowDebugSessionRequest', 'DelimitedTextDataset', + 'DelimitedTextReadSettings', + 'DelimitedTextSink', + 'DelimitedTextSource', + 'DelimitedTextWriteSettings', + 'DependencyReference', + 'DistcpSettings', 'DocumentDbCollectionDataset', + 'DocumentDbCollectionSink', + 'DocumentDbCollectionSource', 'DrillLinkedService', + 'DrillSource', 'DrillTableDataset', 'DynamicsAXLinkedService', 'DynamicsAXResourceDataset', + 'DynamicsAXSource', 'DynamicsCrmEntityDataset', 'DynamicsCrmLinkedService', + 'DynamicsCrmSink', + 'DynamicsCrmSource', 'DynamicsEntityDataset', 'DynamicsLinkedService', + 'DynamicsSink', + 'DynamicsSource', 'EloquaLinkedService', 'EloquaObjectDataset', + 'EloquaSource', + 'EntityReference', + 'ErrorAdditionalInfo', + 'ErrorContract', + 'ErrorResponse', 'EvaluateDataFlowExpressionRequest', 'ExecuteDataFlowActivity', 'ExecuteDataFlowActivityTypePropertiesCompute', @@ -911,20 +1511,35 @@ 'ExposureControlResponse', 'Expression', 'FileServerLinkedService', + 'FileServerLocation', + 'FileServerReadSettings', + 'FileServerWriteSettings', + 'FileSystemSink', + 'FileSystemSource', 'FilterActivity', 'ForEachActivity', + 'FormatReadSettings', + 'FormatWriteSettings', + 'FtpReadSettings', 'FtpServerLinkedService', + 'FtpServerLocation', 'GetMetadataActivity', 'GetSsisObjectMetadataRequest', 'GoogleAdWordsLinkedService', 'GoogleAdWordsObjectDataset', + 'GoogleAdWordsSource', 'GoogleBigQueryLinkedService', 'GoogleBigQueryObjectDataset', + 'GoogleBigQuerySource', 'GoogleCloudStorageLinkedService', + 'GoogleCloudStorageLocation', + 'GoogleCloudStorageReadSettings', 'GreenplumLinkedService', + 'GreenplumSource', 'GreenplumTableDataset', 'HBaseLinkedService', 'HBaseObjectDataset', + 'HBaseSource', 'HDInsightHiveActivity', 'HDInsightLinkedService', 'HDInsightMapReduceActivity', @@ -933,20 +1548,50 @@ 'HDInsightSparkActivity', 'HDInsightStreamingActivity', 'HdfsLinkedService', + 'HdfsLocation', + 'HdfsReadSettings', + 'HdfsSource', 'HiveLinkedService', 'HiveObjectDataset', + 'HiveSource', 'HttpLinkedService', + 'HttpReadSettings', + 'HttpServerLocation', + 'HttpSource', 'HubspotLinkedService', 'HubspotObjectDataset', + 'HubspotSource', 'IfConditionActivity', 'ImpalaLinkedService', 'ImpalaObjectDataset', + 'ImpalaSource', 'InformixLinkedService', + 'InformixSink', + 'InformixSource', 'InformixTableDataset', + 'IntegrationRuntime', + 'IntegrationRuntimeComputeProperties', + 'IntegrationRuntimeCustomSetupScriptProperties', + 'IntegrationRuntimeDataFlowProperties', + 'IntegrationRuntimeDataProxyProperties', + 'IntegrationRuntimeListResponse', 'IntegrationRuntimeReference', + 'IntegrationRuntimeResource', + 'IntegrationRuntimeSsisCatalogInfo', + 'IntegrationRuntimeSsisProperties', + 'IntegrationRuntimeVNetProperties', 'JiraLinkedService', 'JiraObjectDataset', + 'JiraSource', 'JsonDataset', + 'JsonFormat', + 'JsonSink', + 'JsonSource', + 'JsonWriteSettings', + 'LibraryRequirements', + 'LinkedIntegrationRuntimeKeyAuthorization', + 'LinkedIntegrationRuntimeRbacAuthorization', + 'LinkedIntegrationRuntimeType', 'LinkedService', 'LinkedServiceDebugResource', 'LinkedServiceListResponse', @@ -956,21 +1601,34 @@ 'LookupActivity', 'MagentoLinkedService', 'MagentoObjectDataset', + 'MagentoSource', + 'ManagedIdentity', + 'ManagedIntegrationRuntime', 'MappingDataFlow', 'MariaDBLinkedService', + 'MariaDBSource', 'MariaDBTableDataset', 'MarketoLinkedService', 'MarketoObjectDataset', + 'MarketoSource', 
'MicrosoftAccessLinkedService', + 'MicrosoftAccessSink', + 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', 'MongoDbCollectionDataset', + 'MongoDbCursorMethodsProperties', 'MongoDbLinkedService', + 'MongoDbSource', 'MongoDbV2CollectionDataset', 'MongoDbV2LinkedService', + 'MongoDbV2Source', 'MultiplePipelineTrigger', 'MySqlLinkedService', + 'MySqlSource', 'MySqlTableDataset', 'NetezzaLinkedService', + 'NetezzaPartitionSettings', + 'NetezzaSource', 'NetezzaTableDataset', 'Notebook', 'NotebookCell', @@ -983,21 +1641,37 @@ 'NotebookSessionProperties', 'ODataLinkedService', 'ODataResourceDataset', + 'ODataSource', 'OdbcLinkedService', + 'OdbcSink', + 'OdbcSource', 'OdbcTableDataset', 'Office365Dataset', 'Office365LinkedService', + 'Office365Source', 'OracleLinkedService', + 'OraclePartitionSettings', 'OracleServiceCloudLinkedService', 'OracleServiceCloudObjectDataset', + 'OracleServiceCloudSource', + 'OracleSink', + 'OracleSource', 'OracleTableDataset', 'OrcDataset', + 'OrcFormat', + 'OrcSink', + 'OrcSource', 'ParameterSpecification', 'ParquetDataset', + 'ParquetFormat', + 'ParquetSink', + 'ParquetSource', 'PaypalLinkedService', 'PaypalObjectDataset', + 'PaypalSource', 'PhoenixLinkedService', 'PhoenixObjectDataset', + 'PhoenixSource', 'PipelineFolder', 'PipelineListResponse', 'PipelineReference', @@ -1005,14 +1679,26 @@ 'PipelineRun', 'PipelineRunInvokedBy', 'PipelineRunsQueryResponse', + 'PolybaseSettings', 'PostgreSqlLinkedService', + 'PostgreSqlSource', 'PostgreSqlTableDataset', 'PrestoLinkedService', 'PrestoObjectDataset', + 'PrestoSource', + 'PrivateEndpoint', + 'PrivateEndpointConnection', + 'PrivateLinkServiceConnectionState', + 'ProxyResource', 'QueryDataFlowDebugSessionsResponse', 'QuickBooksLinkedService', 'QuickBooksObjectDataset', + 'QuickBooksSource', + 'RecurrenceSchedule', + 'RecurrenceScheduleOccurrence', 'RedirectIncompatibleRowSettings', + 'RedshiftUnloadSettings', + 'RelationalSource', 'RelationalTableDataset', 'RerunTriggerListResponse', 'RerunTriggerResource', @@ -1021,8 +1707,11 @@ 'Resource', 'ResponsysLinkedService', 'ResponsysObjectDataset', + 'ResponsysSource', 'RestResourceDataset', 'RestServiceLinkedService', + 'RestSource', + 'RetryPolicy', 'RunFilterParameters', 'RunQueryFilter', 'RunQueryOrderBy', @@ -1036,30 +1725,54 @@ 'SalesforceLinkedService', 'SalesforceMarketingCloudLinkedService', 'SalesforceMarketingCloudObjectDataset', + 'SalesforceMarketingCloudSource', 'SalesforceObjectDataset', 'SalesforceServiceCloudLinkedService', 'SalesforceServiceCloudObjectDataset', + 'SalesforceServiceCloudSink', + 'SalesforceServiceCloudSource', + 'SalesforceSink', + 'SalesforceSource', 'SapBWLinkedService', 'SapBwCubeDataset', + 'SapBwSource', 'SapCloudForCustomerLinkedService', 'SapCloudForCustomerResourceDataset', + 'SapCloudForCustomerSink', + 'SapCloudForCustomerSource', 'SapEccLinkedService', 'SapEccResourceDataset', + 'SapEccSource', 'SapHanaLinkedService', + 'SapHanaPartitionSettings', + 'SapHanaSource', 'SapHanaTableDataset', 'SapOpenHubLinkedService', + 'SapOpenHubSource', 'SapOpenHubTableDataset', 'SapTableLinkedService', + 'SapTablePartitionSettings', 'SapTableResourceDataset', + 'SapTableSource', + 'ScheduleTrigger', + 'ScheduleTriggerRecurrence', 'ScriptAction', 'SecretBase', 'SecureString', + 'SelfDependencyTumblingWindowTriggerReference', + 'SelfHostedIntegrationRuntime', 'ServiceNowLinkedService', 'ServiceNowObjectDataset', + 'ServiceNowSource', 'SetVariableActivity', + 'SftpLocation', + 'SftpReadSettings', 'SftpServerLinkedService', + 
'SftpWriteSettings', 'ShopifyLinkedService', 'ShopifyObjectDataset', + 'ShopifySource', + 'Sku', 'SparkBatchJob', 'SparkBatchJobState', 'SparkJobDefinition', @@ -1072,45 +1785,78 @@ 'SparkScheduler', 'SparkServiceError', 'SparkServicePlugin', + 'SparkSource', 'SqlConnection', + 'SqlDWSink', + 'SqlDWSource', + 'SqlMISink', + 'SqlMISource', + 'SqlPool', + 'SqlPoolInfoListResult', + 'SqlPoolReference', + 'SqlPoolStoredProcedureActivity', 'SqlScript', 'SqlScriptContent', 'SqlScriptMetadata', 'SqlScriptResource', 'SqlScriptsListResponse', 'SqlServerLinkedService', + 'SqlServerSink', + 'SqlServerSource', 'SqlServerStoredProcedureActivity', 'SqlServerTableDataset', + 'SqlSink', + 'SqlSource', 'SquareLinkedService', 'SquareObjectDataset', + 'SquareSource', 'SsisObjectMetadataStatusResponse', 'StagingSettings', 'StartDataFlowDebugSessionRequest', 'StartDataFlowDebugSessionResponse', + 'StoreReadSettings', + 'StoreWriteSettings', 'StoredProcedureParameter', 'SubResource', 'SubResourceDebugResource', 'SwitchActivity', 'SwitchCase', 'SybaseLinkedService', + 'SybaseSource', 'SybaseTableDataset', + 'SynapseNotebookActivity', + 'SynapseNotebookReference', + 'SynapseSparkJobDefinitionActivity', + 'SynapseSparkJobReference', + 'TabularSource', + 'TabularTranslator', 'TeradataLinkedService', + 'TeradataPartitionSettings', + 'TeradataSource', 'TeradataTableDataset', + 'TextFormat', + 'TrackedResource', 'Transformation', 'Trigger', 'TriggerDependencyProvisioningStatus', + 'TriggerDependencyReference', 'TriggerListResponse', 'TriggerPipelineReference', + 'TriggerReference', 'TriggerResource', 'TriggerRun', 'TriggerRunsQueryResponse', 'TriggerSubscriptionOperationStatus', + 'TumblingWindowTrigger', + 'TumblingWindowTriggerDependencyReference', 'UntilActivity', 'UserProperty', 'ValidationActivity', 'VariableSpecification', 'VerticaLinkedService', + 'VerticaSource', 'VerticaTableDataset', + 'VirtualNetworkProfile', 'WaitActivity', 'WebActivity', 'WebActivityAuthentication', @@ -1120,25 +1866,39 @@ 'WebHookActivity', 'WebLinkedService', 'WebLinkedServiceTypeProperties', + 'WebSource', 'WebTableDataset', 'Workspace', 'WorkspaceIdentity', 'WorkspaceUpdateParameters', 'XeroLinkedService', 'XeroObjectDataset', + 'XeroSource', 'ZohoLinkedService', 'ZohoObjectDataset', + 'ZohoSource', 'AvroCompressionCodec', 'AzureFunctionActivityMethod', + 'AzureSearchIndexWriteBehaviorType', + 'BigDataPoolReferenceType', + 'BlobEventTypes', + 'CassandraSourceReadConsistencyLevels', 'CellOutputType', + 'CopyBehaviorType', 'DataFlowComputeType', + 'DataFlowReferenceType', 'DatasetCompressionLevel', + 'DatasetReferenceType', + 'DayOfWeek', + 'Db2AuthenticationType', 'DelimitedTextCompressionCodec', 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', 'DynamicsServicePrincipalCredentialType', + 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', + 'ExpressionType', 'FtpAuthenticationType', 'GoogleAdWordsAuthenticationType', 'GoogleBigQueryAuthenticationType', @@ -1150,38 +1910,71 @@ 'HiveThriftTransportProtocol', 'HttpAuthenticationType', 'ImpalaAuthenticationType', + 'IntegrationRuntimeEdition', + 'IntegrationRuntimeEntityReferenceType', + 'IntegrationRuntimeLicenseType', + 'IntegrationRuntimeReferenceType', + 'IntegrationRuntimeSsisCatalogPricingTier', + 'IntegrationRuntimeState', + 'IntegrationRuntimeType', + 'JsonFormatFilePattern', + 'JsonWriteFilePattern', 'MongoDbAuthenticationType', + 'NetezzaPartitionOption', + 'NodeSize', + 'NodeSizeFamily', + 'NotebookReferenceType', 
'ODataAadServicePrincipalCredentialType', 'ODataAuthenticationType', + 'OraclePartitionOption', 'OrcCompressionCodec', 'ParameterType', 'ParquetCompressionCodec', 'PhoenixAuthenticationType', + 'PipelineReferenceType', 'PluginCurrentState', + 'PolybaseSettingsRejectType', 'PrestoAuthenticationType', + 'PrivateLinkServiceConnectionStateStatus', + 'RecurrenceFrequency', + 'ResourceIdentityType', 'RestServiceAuthenticationType', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrder', 'RunQueryOrderByField', + 'SalesforceSinkWriteBehavior', + 'SalesforceSourceReadBehavior', + 'SapCloudForCustomerSinkWriteBehavior', 'SapHanaAuthenticationType', + 'SapHanaPartitionOption', + 'SapTablePartitionOption', 'SchedulerCurrentState', 'ServiceNowAuthenticationType', 'SftpAuthenticationType', 'SparkAuthenticationType', 'SparkBatchJobResultType', 'SparkErrorSource', + 'SparkJobReferenceType', 'SparkJobType', 'SparkServerType', 'SparkThriftTransportProtocol', 'SqlConnectionType', + 'SqlPoolReferenceType', + 'SqlScriptType', + 'SsisLogLocationType', 'SsisPackageLocationType', 'StoredProcedureParameterType', 'SybaseAuthenticationType', 'TeradataAuthenticationType', + 'TeradataPartitionOption', + 'TriggerReferenceType', 'TriggerRunStatus', 'TriggerRuntimeState', + 'TumblingWindowFrequency', + 'Type', 'VariableType', 'WebActivityMethod', 'WebAuthenticationType', + 'WebHookActivityMethod', ] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py index 133cdabe106e..11cc7a225ebd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py @@ -6,485 +6,839 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class AvroCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "none" + DEFLATE = "deflate" + SNAPPY = "snappy" + XZ = "xz" + BZIP2 = "bzip2" + +class AzureFunctionActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The list of HTTP methods supported by a AzureFunctionActivity. + """ -class AvroCompressionCodec(str, Enum): + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + OPTIONS = "OPTIONS" + HEAD = "HEAD" + TRACE = "TRACE" - none = "none" - deflate = "deflate" - snappy = "snappy" - xz = "xz" - bzip2 = "bzip2" +class AzureSearchIndexWriteBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Specify the write behavior when upserting documents into Azure Search Index. + """ -class AzureFunctionActivityMethod(str, Enum): - """The list of HTTP methods supported by a AzureFunctionActivity. 
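The regenerated enums rename every member to UPPER_CASE, and `_CaseInsensitiveEnumMeta` (defined above) is what keeps the old lowercase spellings resolvable: both `__getitem__` and `__getattr__` upper-case the requested name before the member lookup. A minimal sketch of the effect, using `ParameterType` from the export list above as the example:

    from azure.synapse.artifacts.models import ParameterType

    ParameterType.STRING                      # new canonical member name
    ParameterType.string                      # old lowercase spelling, resolved by __getattr__
    ParameterType["string"]                   # item lookup is upper-cased the same way
    assert ParameterType.STRING == "String"   # members remain str, so raw values compare equal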
+ MERGE = "Merge" + UPLOAD = "Upload" + +class BigDataPoolReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Big data pool reference type. """ - get = "GET" - post = "POST" - put = "PUT" - delete = "DELETE" - options = "OPTIONS" - head = "HEAD" - trace = "TRACE" + BIG_DATA_POOL_REFERENCE = "BigDataPoolReference" + +class BlobEventTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + MICROSOFT_STORAGE_BLOB_CREATED = "Microsoft.Storage.BlobCreated" + MICROSOFT_STORAGE_BLOB_DELETED = "Microsoft.Storage.BlobDeleted" + +class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The consistency level specifies how many Cassandra servers must respond to a read request + before returning data to the client application. Cassandra checks the specified number of + Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + """ -class CellOutputType(str, Enum): + ALL = "ALL" + EACH_QUORUM = "EACH_QUORUM" + QUORUM = "QUORUM" + LOCAL_QUORUM = "LOCAL_QUORUM" + ONE = "ONE" + TWO = "TWO" + THREE = "THREE" + LOCAL_ONE = "LOCAL_ONE" + SERIAL = "SERIAL" + LOCAL_SERIAL = "LOCAL_SERIAL" + +class CellOutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Execution, display, or stream outputs. """ - execute_result = "execute_result" - display_data = "display_data" - stream = "stream" - error = "error" + EXECUTE_RESULT = "execute_result" + DISPLAY_DATA = "display_data" + STREAM = "stream" + ERROR = "error" + +class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available types of copy behavior. + """ + + PRESERVE_HIERARCHY = "PreserveHierarchy" + FLATTEN_HIERARCHY = "FlattenHierarchy" + MERGE_FILES = "MergeFiles" -class DataFlowComputeType(str, Enum): +class DataFlowComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Compute type of the cluster which will execute data flow job. """ - general = "General" - memory_optimized = "MemoryOptimized" - compute_optimized = "ComputeOptimized" + GENERAL = "General" + MEMORY_OPTIMIZED = "MemoryOptimized" + COMPUTE_OPTIMIZED = "ComputeOptimized" -class DatasetCompressionLevel(str, Enum): +class DataFlowReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Data flow reference type. + """ + + DATA_FLOW_REFERENCE = "DataFlowReference" + +class DatasetCompressionLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available compression levels. """ - optimal = "Optimal" - fastest = "Fastest" + OPTIMAL = "Optimal" + FASTEST = "Fastest" -class DelimitedTextCompressionCodec(str, Enum): +class DatasetReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Dataset reference type. + """ - bzip2 = "bzip2" - gzip = "gzip" - deflate = "deflate" - zip_deflate = "zipDeflate" - snappy = "snappy" - lz4 = "lz4" + DATASET_REFERENCE = "DatasetReference" -class DependencyCondition(str, Enum): +class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - succeeded = "Succeeded" - failed = "Failed" - skipped = "Skipped" - completed = "Completed" + SUNDAY = "Sunday" + MONDAY = "Monday" + TUESDAY = "Tuesday" + WEDNESDAY = "Wednesday" + THURSDAY = "Thursday" + FRIDAY = "Friday" + SATURDAY = "Saturday" -class DynamicsAuthenticationType(str, Enum): +class Db2AuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """AuthenticationType to be used for connection. 
+ """ + + BASIC = "Basic" + +class DelimitedTextCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + BZIP2 = "bzip2" + GZIP = "gzip" + DEFLATE = "deflate" + ZIP_DEFLATE = "zipDeflate" + SNAPPY = "snappy" + LZ4 = "lz4" + +class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + SKIPPED = "Skipped" + COMPLETED = "Completed" + +class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). """ - office365 = "Office365" - ifd = "Ifd" - aad_service_principal = "AADServicePrincipal" + OFFICE365 = "Office365" + IFD = "Ifd" + AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" -class DynamicsDeploymentType(str, Enum): +class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). """ - online = "Online" - on_premises_with_ifd = "OnPremisesWithIfd" + ONLINE = "Online" + ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsServicePrincipalCredentialType(str, Enum): +class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). """ - service_principal_key = "ServicePrincipalKey" - service_principal_cert = "ServicePrincipalCert" + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class EventSubscriptionStatus(str, Enum): +class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The write behavior for the operation. + """ + + UPSERT = "Upsert" + +class EventSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Event Subscription Status. """ - enabled = "Enabled" - provisioning = "Provisioning" - deprovisioning = "Deprovisioning" - disabled = "Disabled" - unknown = "Unknown" + ENABLED = "Enabled" + PROVISIONING = "Provisioning" + DEPROVISIONING = "Deprovisioning" + DISABLED = "Disabled" + UNKNOWN = "Unknown" + +class ExpressionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Expression type. + """ + + EXPRESSION = "Expression" -class FtpAuthenticationType(str, Enum): +class FtpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ - basic = "Basic" - anonymous = "Anonymous" + BASIC = "Basic" + ANONYMOUS = "Anonymous" -class GoogleAdWordsAuthenticationType(str, Enum): +class GoogleAdWordsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. 
""" - service_authentication = "ServiceAuthentication" - user_authentication = "UserAuthentication" + SERVICE_AUTHENTICATION = "ServiceAuthentication" + USER_AUTHENTICATION = "UserAuthentication" -class GoogleBigQueryAuthenticationType(str, Enum): +class GoogleBigQueryAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. """ - service_authentication = "ServiceAuthentication" - user_authentication = "UserAuthentication" + SERVICE_AUTHENTICATION = "ServiceAuthentication" + USER_AUTHENTICATION = "UserAuthentication" -class HBaseAuthenticationType(str, Enum): +class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism to use to connect to the HBase server. """ - anonymous = "Anonymous" - basic = "Basic" + ANONYMOUS = "Anonymous" + BASIC = "Basic" -class HdiNodeTypes(str, Enum): +class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The node types on which the script action should be executed. """ - headnode = "Headnode" - workernode = "Workernode" - zookeeper = "Zookeeper" + HEADNODE = "Headnode" + WORKERNODE = "Workernode" + ZOOKEEPER = "Zookeeper" -class HDInsightActivityDebugInfoOption(str, Enum): +class HDInsightActivityDebugInfoOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The HDInsightActivityDebugInfoOption settings to use. """ - none = "None" - always = "Always" - failure = "Failure" + NONE = "None" + ALWAYS = "Always" + FAILURE = "Failure" -class HiveAuthenticationType(str, Enum): +class HiveAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Hive server. """ - anonymous = "Anonymous" - username = "Username" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME = "Username" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class HiveServerType(str, Enum): +class HiveServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of Hive server. """ - hive_server1 = "HiveServer1" - hive_server2 = "HiveServer2" - hive_thrift_server = "HiveThriftServer" + HIVE_SERVER1 = "HiveServer1" + HIVE_SERVER2 = "HiveServer2" + HIVE_THRIFT_SERVER = "HiveThriftServer" -class HiveThriftTransportProtocol(str, Enum): +class HiveThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. """ - binary = "Binary" - sasl = "SASL" - http = "HTTP " + BINARY = "Binary" + SASL = "SASL" + HTTP = "HTTP " -class HttpAuthenticationType(str, Enum): +class HttpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the HTTP server. """ - basic = "Basic" - anonymous = "Anonymous" - digest = "Digest" - windows = "Windows" - client_certificate = "ClientCertificate" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + DIGEST = "Digest" + WINDOWS = "Windows" + CLIENT_CERTIFICATE = "ClientCertificate" -class ImpalaAuthenticationType(str, Enum): +class ImpalaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. 
""" - anonymous = "Anonymous" - sasl_username = "SASLUsername" - username_and_password = "UsernameAndPassword" + ANONYMOUS = "Anonymous" + SASL_USERNAME = "SASLUsername" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + +class IntegrationRuntimeEdition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The edition for the SSIS Integration Runtime + """ -class MongoDbAuthenticationType(str, Enum): + STANDARD = "Standard" + ENTERPRISE = "Enterprise" + +class IntegrationRuntimeEntityReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of this referenced entity. + """ + + INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" + LINKED_SERVICE_REFERENCE = "LinkedServiceReference" + +class IntegrationRuntimeLicenseType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """License type for bringing your own license scenario. + """ + + BASE_PRICE = "BasePrice" + LICENSE_INCLUDED = "LicenseIncluded" + +class IntegrationRuntimeReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of integration runtime. + """ + + INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" + +class IntegrationRuntimeSsisCatalogPricingTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The pricing tier for the catalog database. The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/ + """ + + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + PREMIUM_RS = "PremiumRS" + +class IntegrationRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The state of integration runtime. + """ + + INITIAL = "Initial" + STOPPED = "Stopped" + STARTED = "Started" + STARTING = "Starting" + STOPPING = "Stopping" + NEED_REGISTRATION = "NeedRegistration" + ONLINE = "Online" + LIMITED = "Limited" + OFFLINE = "Offline" + ACCESS_DENIED = "AccessDenied" + +class IntegrationRuntimeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of integration runtime. + """ + + MANAGED = "Managed" + SELF_HOSTED = "SelfHosted" + +class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """JSON format file pattern. A property of JsonFormat. + """ + + SET_OF_OBJECTS = "setOfObjects" + ARRAY_OF_OBJECTS = "arrayOfObjects" + +class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """File pattern of JSON. This setting controls the way a collection of JSON objects will be + treated. The default value is 'setOfObjects'. It is case-sensitive. + """ + + SET_OF_OBJECTS = "setOfObjects" + ARRAY_OF_OBJECTS = "arrayOfObjects" + +class MongoDbAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the MongoDB database. """ - basic = "Basic" - anonymous = "Anonymous" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + +class NetezzaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for Netezza read in parallel. + """ + + NONE = "None" + DATA_SLICE = "DataSlice" + DYNAMIC_RANGE = "DynamicRange" + +class NodeSize(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The level of compute power that each node in the Big Data pool has. + """ + + NONE = "None" + SMALL = "Small" + MEDIUM = "Medium" + LARGE = "Large" + X_LARGE = "XLarge" + XX_LARGE = "XXLarge" + +class NodeSizeFamily(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The kind of nodes that the Big Data pool provides. 
+ """ + + NONE = "None" + MEMORY_OPTIMIZED = "MemoryOptimized" + +class NotebookReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Synapse notebook reference type. + """ + + NOTEBOOK_REFERENCE = "NotebookReference" -class ODataAadServicePrincipalCredentialType(str, Enum): +class ODataAadServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specify the credential type (key or cert) is used for service principal. """ - service_principal_key = "ServicePrincipalKey" - service_principal_cert = "ServicePrincipalCert" + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" -class ODataAuthenticationType(str, Enum): +class ODataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the OData service. """ - basic = "Basic" - anonymous = "Anonymous" - windows = "Windows" - aad_service_principal = "AadServicePrincipal" - managed_service_identity = "ManagedServiceIdentity" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + WINDOWS = "Windows" + AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" + MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class OrcCompressionCodec(str, Enum): +class OraclePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for Oracle read in parallel. + """ + + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + DYNAMIC_RANGE = "DynamicRange" - none = "none" - zlib = "zlib" - snappy = "snappy" +class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): -class ParameterType(str, Enum): + NONE = "none" + ZLIB = "zlib" + SNAPPY = "snappy" + +class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. """ - object = "Object" - string = "String" - int = "Int" - float = "Float" - bool = "Bool" - array = "Array" - secure_string = "SecureString" + OBJECT = "Object" + STRING = "String" + INT = "Int" + FLOAT = "Float" + BOOL = "Bool" + ARRAY = "Array" + SECURE_STRING = "SecureString" -class ParquetCompressionCodec(str, Enum): +class ParquetCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - none = "none" - gzip = "gzip" - snappy = "snappy" - lzo = "lzo" + NONE = "none" + GZIP = "gzip" + SNAPPY = "snappy" + LZO = "lzo" -class PhoenixAuthenticationType(str, Enum): +class PhoenixAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Phoenix server. """ - anonymous = "Anonymous" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class PluginCurrentState(str, Enum): +class PipelineReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Pipeline reference type. 
+ """ - preparation = "Preparation" - resource_acquisition = "ResourceAcquisition" - queued = "Queued" - submission = "Submission" - monitoring = "Monitoring" - cleanup = "Cleanup" - ended = "Ended" + PIPELINE_REFERENCE = "PipelineReference" -class PrestoAuthenticationType(str, Enum): +class PluginCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + PREPARATION = "Preparation" + RESOURCE_ACQUISITION = "ResourceAcquisition" + QUEUED = "Queued" + SUBMISSION = "Submission" + MONITORING = "Monitoring" + CLEANUP = "Cleanup" + ENDED = "Ended" + +class PolybaseSettingsRejectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates whether the RejectValue property is specified as a literal value or a percentage. + """ + + VALUE = "value" + PERCENTAGE = "percentage" + +class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication mechanism used to connect to the Presto server. """ - anonymous = "Anonymous" - ldap = "LDAP" + ANONYMOUS = "Anonymous" + LDAP = "LDAP" + +class PrivateLinkServiceConnectionStateStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The private link service connection status. + """ + + APPROVED = "Approved" + PENDING = "Pending" + REJECTED = "Rejected" + DISCONNECTED = "Disconnected" -class RestServiceAuthenticationType(str, Enum): +class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enumerates possible frequency option for the schedule trigger. + """ + + NOT_SPECIFIED = "NotSpecified" + MINUTE = "Minute" + HOUR = "Hour" + DAY = "Day" + WEEK = "Week" + MONTH = "Month" + YEAR = "Year" + +class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of managed identity for the workspace + """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + +class RestServiceAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the REST service. """ - anonymous = "Anonymous" - basic = "Basic" - aad_service_principal = "AadServicePrincipal" - managed_service_identity = "ManagedServiceIdentity" + ANONYMOUS = "Anonymous" + BASIC = "Basic" + AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" + MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" -class RunQueryFilterOperand(str, Enum): +class RunQueryFilterOperand(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. 
""" - pipeline_name = "PipelineName" - status = "Status" - run_start = "RunStart" - run_end = "RunEnd" - activity_name = "ActivityName" - activity_run_start = "ActivityRunStart" - activity_run_end = "ActivityRunEnd" - activity_type = "ActivityType" - trigger_name = "TriggerName" - trigger_run_timestamp = "TriggerRunTimestamp" - run_group_id = "RunGroupId" - latest_only = "LatestOnly" - -class RunQueryFilterOperator(str, Enum): + PIPELINE_NAME = "PipelineName" + STATUS = "Status" + RUN_START = "RunStart" + RUN_END = "RunEnd" + ACTIVITY_NAME = "ActivityName" + ACTIVITY_RUN_START = "ActivityRunStart" + ACTIVITY_RUN_END = "ActivityRunEnd" + ACTIVITY_TYPE = "ActivityType" + TRIGGER_NAME = "TriggerName" + TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" + RUN_GROUP_ID = "RunGroupId" + LATEST_ONLY = "LatestOnly" + +class RunQueryFilterOperator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Operator to be used for filter. """ - equals = "Equals" - not_equals = "NotEquals" - in_enum = "In" - not_in = "NotIn" + EQUALS = "Equals" + NOT_EQUALS = "NotEquals" + IN_ENUM = "In" + NOT_IN = "NotIn" -class RunQueryOrder(str, Enum): +class RunQueryOrder(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Sorting order of the parameter. """ - asc = "ASC" - desc = "DESC" + ASC = "ASC" + DESC = "DESC" -class RunQueryOrderByField(str, Enum): +class RunQueryOrderByField(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. """ - run_start = "RunStart" - run_end = "RunEnd" - pipeline_name = "PipelineName" - status = "Status" - activity_name = "ActivityName" - activity_run_start = "ActivityRunStart" - activity_run_end = "ActivityRunEnd" - trigger_name = "TriggerName" - trigger_run_timestamp = "TriggerRunTimestamp" + RUN_START = "RunStart" + RUN_END = "RunEnd" + PIPELINE_NAME = "PipelineName" + STATUS = "Status" + ACTIVITY_NAME = "ActivityName" + ACTIVITY_RUN_START = "ActivityRunStart" + ACTIVITY_RUN_END = "ActivityRunEnd" + TRIGGER_NAME = "TriggerName" + TRIGGER_RUN_TIMESTAMP = "TriggerRunTimestamp" + +class SalesforceSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The write behavior for the operation. Default is Insert. + """ + + INSERT = "Insert" + UPSERT = "Upsert" + +class SalesforceSourceReadBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The read behavior for the operation. Default is Query. + """ + + QUERY = "Query" + QUERY_ALL = "QueryAll" -class SapHanaAuthenticationType(str, Enum): +class SapCloudForCustomerSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The write behavior for the operation. Default is 'Insert'. + """ + + INSERT = "Insert" + UPDATE = "Update" + +class SapHanaAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the SAP HANA server. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class SchedulerCurrentState(str, Enum): +class SapHanaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for SAP HANA read in parallel. 
+ """ - queued = "Queued" - scheduled = "Scheduled" - ended = "Ended" + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + SAP_HANA_DYNAMIC_RANGE = "SapHanaDynamicRange" -class ServiceNowAuthenticationType(str, Enum): +class SapTablePartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for SAP table read in parallel. + """ + + NONE = "None" + PARTITION_ON_INT = "PartitionOnInt" + PARTITION_ON_CALENDAR_YEAR = "PartitionOnCalendarYear" + PARTITION_ON_CALENDAR_MONTH = "PartitionOnCalendarMonth" + PARTITION_ON_CALENDAR_DATE = "PartitionOnCalendarDate" + PARTITION_ON_TIME = "PartitionOnTime" + +class SchedulerCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + QUEUED = "Queued" + SCHEDULED = "Scheduled" + ENDED = "Ended" + +class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to use. """ - basic = "Basic" - o_auth2 = "OAuth2" + BASIC = "Basic" + O_AUTH2 = "OAuth2" -class SftpAuthenticationType(str, Enum): +class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ - basic = "Basic" - ssh_public_key = "SshPublicKey" + BASIC = "Basic" + SSH_PUBLIC_KEY = "SshPublicKey" -class SparkAuthenticationType(str, Enum): +class SparkAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication method used to access the Spark server. """ - anonymous = "Anonymous" - username = "Username" - username_and_password = "UsernameAndPassword" - windows_azure_hd_insight_service = "WindowsAzureHDInsightService" + ANONYMOUS = "Anonymous" + USERNAME = "Username" + USERNAME_AND_PASSWORD = "UsernameAndPassword" + WINDOWS_AZURE_HD_INSIGHT_SERVICE = "WindowsAzureHDInsightService" -class SparkBatchJobResultType(str, Enum): +class SparkBatchJobResultType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The Spark batch job result. """ - uncertain = "Uncertain" - succeeded = "Succeeded" - failed = "Failed" - cancelled = "Cancelled" + UNCERTAIN = "Uncertain" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELLED = "Cancelled" + +class SparkErrorSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): -class SparkErrorSource(str, Enum): + SYSTEM = "System" + USER = "User" + UNKNOWN = "Unknown" + DEPENDENCY = "Dependency" + +class SparkJobReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Synapse spark job reference type. + """ - system = "System" - user = "User" - unknown = "Unknown" - dependency = "Dependency" + SPARK_JOB_DEFINITION_REFERENCE = "SparkJobDefinitionReference" -class SparkJobType(str, Enum): +class SparkJobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The job type. """ - spark_batch = "SparkBatch" - spark_session = "SparkSession" + SPARK_BATCH = "SparkBatch" + SPARK_SESSION = "SparkSession" -class SparkServerType(str, Enum): +class SparkServerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of Spark server. """ - shark_server = "SharkServer" - shark_server2 = "SharkServer2" - spark_thrift_server = "SparkThriftServer" + SHARK_SERVER = "SharkServer" + SHARK_SERVER2 = "SharkServer2" + SPARK_THRIFT_SERVER = "SparkThriftServer" -class SparkThriftTransportProtocol(str, Enum): +class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The transport protocol to use in the Thrift layer. 
""" - binary = "Binary" - sasl = "SASL" - http = "HTTP " + BINARY = "Binary" + SASL = "SASL" + HTTP = "HTTP " -class SqlConnectionType(str, Enum): +class SqlConnectionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of the connection. """ - sql_on_demand = "SqlOnDemand" - sql_pool = "SqlPool" + SQL_ON_DEMAND = "SqlOnDemand" + SQL_POOL = "SqlPool" + +class SqlPoolReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """SQL pool reference type. + """ + + SQL_POOL_REFERENCE = "SqlPoolReference" + +class SqlScriptType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of the SQL script. + """ + + SQL_QUERY = "SqlQuery" + +class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of SSIS log location. + """ + + FILE = "File" -class SsisPackageLocationType(str, Enum): +class SsisPackageLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The type of SSIS package location. """ - ssisdb = "SSISDB" - file = "File" - inline_package = "InlinePackage" + SSISDB = "SSISDB" + FILE = "File" + INLINE_PACKAGE = "InlinePackage" -class StoredProcedureParameterType(str, Enum): +class StoredProcedureParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Stored procedure parameter type. """ - string = "String" - int = "Int" - int64 = "Int64" - decimal = "Decimal" - guid = "Guid" - boolean = "Boolean" - date = "Date" + STRING = "String" + INT = "Int" + INT64 = "Int64" + DECIMAL = "Decimal" + GUID = "Guid" + BOOLEAN = "Boolean" + DATE = "Date" -class SybaseAuthenticationType(str, Enum): +class SybaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" -class TeradataAuthenticationType(str, Enum): +class TeradataAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """AuthenticationType to be used for connection. """ - basic = "Basic" - windows = "Windows" + BASIC = "Basic" + WINDOWS = "Windows" + +class TeradataPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for teradata read in parallel. + """ + + NONE = "None" + HASH = "Hash" + DYNAMIC_RANGE = "DynamicRange" -class TriggerRunStatus(str, Enum): +class TriggerReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Trigger reference type. + """ + + TRIGGER_REFERENCE = "TriggerReference" + +class TriggerRunStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Trigger run status. """ - succeeded = "Succeeded" - failed = "Failed" - inprogress = "Inprogress" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + INPROGRESS = "Inprogress" -class TriggerRuntimeState(str, Enum): +class TriggerRuntimeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible state of Triggers. """ - started = "Started" - stopped = "Stopped" - disabled = "Disabled" + STARTED = "Started" + STOPPED = "Stopped" + DISABLED = "Disabled" + +class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enumerates possible frequency option for the tumbling window trigger. + """ + + MINUTE = "Minute" + HOUR = "Hour" + +class Type(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Linked service reference type. 
+ """ + + LINKED_SERVICE_REFERENCE = "LinkedServiceReference" -class VariableType(str, Enum): +class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Variable type. """ - string = "String" - bool = "Bool" - boolean = "Boolean" - array = "Array" + STRING = "String" + BOOL = "Bool" + BOOLEAN = "Boolean" + ARRAY = "Array" -class WebActivityMethod(str, Enum): +class WebActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The list of HTTP methods supported by a WebActivity. """ - get = "GET" - post = "POST" - put = "PUT" - delete = "DELETE" + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" -class WebAuthenticationType(str, Enum): +class WebAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of authentication used to connect to the web table source. """ - basic = "Basic" - anonymous = "Anonymous" - client_certificate = "ClientCertificate" + BASIC = "Basic" + ANONYMOUS = "Anonymous" + CLIENT_CERTIFICATE = "ClientCertificate" + +class WebHookActivityMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The list of HTTP methods supported by a WebHook activity. + """ + + POST = "POST" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py index 5d05dbaa3302..a9b3664d40af 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py @@ -14,7 +14,7 @@ class Activity(msrest.serialization.Model): """A pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SynapseSparkJobDefinitionActivity, SqlPoolStoredProcedureActivity, SwitchActivity, SynapseNotebookActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. All required parameters must be populated in order to send to Azure. 
@@ -48,7 +48,7 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'SparkJob': 'SynapseSparkJobDefinitionActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity', 'Switch': 'SwitchActivity', 'SynapseNotebook': 'SynapseNotebookActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} } def __init__( @@ -58,7 +58,7 @@ def __init__( super(Activity, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.name = kwargs['name'] - self.type = 'Activity' + self.type = 'Activity' # type: str self.description = kwargs.get('description', None) self.depends_on = kwargs.get('depends_on', None) self.user_properties = kwargs.get('user_properties', None) @@ -334,7 +334,7 @@ def __init__( ): super(LinkedService, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'LinkedService' + self.type = 'LinkedService' # type: str self.connect_via = kwargs.get('connect_via', None) self.description = kwargs.get('description', None) self.parameters = kwargs.get('parameters', None) @@ -422,7 +422,7 @@ def __init__( **kwargs ): super(AmazonMWSLinkedService, self).__init__(**kwargs) - self.type = 'AmazonMWS' + self.type = 'AmazonMWS' # type: str self.endpoint = kwargs['endpoint'] self.marketplace_id = kwargs['marketplace_id'] self.seller_id = kwargs['seller_id'] @@ -439,7 +439,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
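The docstring above adds CustomDataset to the known Dataset sub-classes, and the hunk that follows registers it in `_subtype_map` under the `'CustomDataset'` discriminator. A minimal sketch of the polymorphic deserialization this drives, using the stock `msrest.serialization.Model.deserialize` entry point (the payload and the linked service name here are hypothetical):

    from azure.synapse.artifacts.models import CustomDataset, Dataset

    raw = {
        "type": "CustomDataset",  # discriminator matched against _subtype_map
        "linkedServiceName": {"referenceName": "ls_example",
                              "type": "LinkedServiceReference"},
    }

    # msrest reads raw["type"], walks Dataset._subtype_map, and instantiates
    # the registered sub-class rather than the Dataset base.
    ds = Dataset.deserialize(raw)
    assert isinstance(ds, CustomDataset)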
@@ -485,7 +485,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -494,7 +494,7 @@ def __init__( ): super(Dataset, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Dataset' + self.type = 'Dataset' # type: str self.description = kwargs.get('description', None) self.structure = kwargs.get('structure', None) self.schema = kwargs.get('schema', None) @@ -558,10 +558,165 @@ def __init__( **kwargs ): super(AmazonMWSObjectDataset, self).__init__(**kwargs) - self.type = 'AmazonMWSObject' + self.type = 'AmazonMWSObject' # type: str self.table_name = kwargs.get('table_name', None) +class CopySource(msrest.serialization.Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, TabularSource, WebSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource'} + } + + def __init__( + self, + **kwargs + ): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'CopySource' # type: str + self.source_retry_count = kwargs.get('source_retry_count', None) + self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + + +class TabularSource(CopySource): + """Copy activity sources of tabular type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonMWSSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAXSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMISource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonMWSSource': 'AmazonMWSSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAXSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + } + + def __init__( + self, + **kwargs + ): + super(TabularSource, self).__init__(**kwargs) + self.type = 'TabularSource' # type: str + self.query_timeout = kwargs.get('query_timeout', None) + + +class 
AmazonMWSSource(TabularSource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonMWSSource, self).__init__(**kwargs) + self.type = 'AmazonMWSSource' # type: str + self.query = kwargs.get('query', None) + + class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. @@ -626,7 +781,7 @@ def __init__( **kwargs ): super(AmazonRedshiftLinkedService, self).__init__(**kwargs) - self.type = 'AmazonRedshift' + self.type = 'AmazonRedshift' # type: str self.server = kwargs['server'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) @@ -635,6 +790,61 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class AmazonRedshiftSource(TabularSource): + """A copy activity source for Amazon Redshift Source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be + unloaded into S3 first and then copied into the targeted sink from the interim S3. + :type redshift_unload_settings: ~azure.synapse.artifacts.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonRedshiftSource, self).__init__(**kwargs) + self.type = 'AmazonRedshiftSource' # type: str + self.query = kwargs.get('query', None) + self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) + + class AmazonRedshiftTableDataset(Dataset): """The Amazon Redshift table dataset. @@ -698,7 +908,7 @@ def __init__( **kwargs ): super(AmazonRedshiftTableDataset, self).__init__(**kwargs) - self.type = 'AmazonRedshiftTable' + self.type = 'AmazonRedshiftTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -760,737 +970,705 @@ def __init__( **kwargs ): super(AmazonS3LinkedService, self).__init__(**kwargs) - self.type = 'AmazonS3' + self.type = 'AmazonS3' # type: str self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AppendVariableActivity(Activity): - """Append value for a Variable of type Array. +class DatasetLocation(msrest.serialization.Model): + """Dataset location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3Location, AzureBlobFSLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, SftpLocation. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. 
- :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression. - :type value: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation'} } def __init__( self, **kwargs ): - super(AppendVariableActivity, self).__init__(**kwargs) - self.type = 'AppendVariable' - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'DatasetLocation' # type: str + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) -class AvroDataset(Dataset): - """Avro dataset. +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the avro storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + resultType string). + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or Expression with resultType + string). + :type version: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AvroDataset, self).__init__(**kwargs) - self.type = 'Avro' - self.location = kwargs.get('location', None) - self.avro_compression_codec = kwargs.get('avro_compression_codec', None) - self.avro_compression_level = kwargs.get('avro_compression_level', None) + super(AmazonS3Location, self).__init__(**kwargs) + self.type = 'AmazonS3Location' # type: str + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. +class StoreReadSettings(msrest.serialization.Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AmazonS3ReadSettings, AzureBlobFSReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, SftpReadSettings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Batch account name. Type: string (or Expression with - resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.synapse.artifacts.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType - string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with - resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings'} } def __init__( self, **kwargs ): - super(AzureBatchLinkedService, self).__init__(**kwargs) - self.type = 'AzureBatch' - self.account_name = kwargs['account_name'] - self.access_key = kwargs.get('access_key', None) - self.batch_uri = kwargs['batch_uri'] - self.pool_name = kwargs['pool_name'] - self.linked_service_name = kwargs['linked_service_name'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'StoreReadSettings' # type: str + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. +class AmazonS3ReadSettings(StoreReadSettings): + """Amazon S3 read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service.
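The _subtype_map above is what lets msrest round-trip this polymorphic hierarchy: during deserialization the value under the 'type' discriminator key is looked up in the map and the matching subclass is instantiated. A hedged sketch of that dispatch, assuming msrest's Deserializer accepts a plain dict payload (the payload values are illustrative):

from msrest import Deserializer
from azure.synapse.artifacts import models

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

# 'type' is the discriminator declared in StoreReadSettings._subtype_map.
payload = {"type": "AmazonS3ReadSettings", "recursive": True, "prefix": "raw/"}
settings = deserialize('StoreReadSettings', payload)
assert isinstance(settings, models.AmazonS3ReadSettings)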
- :type annotations: list[object] - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureBlobFSLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobFS' - self.url = kwargs['url'] - self.account_key = kwargs.get('account_key', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AmazonS3ReadSettings, self).__init__(**kwargs) + self.type = 'AmazonS3ReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. +class AppendVariableActivity(Activity): + """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. 
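A short usage sketch for the S3 read settings just defined (all values illustrative; note the datetime bounds are strings, per the "Type: string (or Expression with resultType string)" annotations):

from azure.synapse.artifacts.models import AmazonS3ReadSettings

read_settings = AmazonS3ReadSettings(
    recursive=True,                            # descend below folderPath
    wildcard_file_name="*.avro",               # 'wildcardFileName'
    prefix="raw/2020/",                        # S3 object-name prefix filter
    modified_datetime_start="2020-09-01T00:00:00Z",
    modified_datetime_end="2020-09-15T00:00:00Z",
    max_concurrent_connections=4,              # inherited from StoreReadSettings
)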
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri, - serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with - connectionString, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is - mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param variable_name: Name of the variable whose value needs to be appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression. 
+ :type value: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureBlobStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureBlobStorage' - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.service_endpoint = kwargs.get('service_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AppendVariableActivity, self).__init__(**kwargs) + self.type = 'AppendVariable' # type: str + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AutoPauseProperties(msrest.serialization.Model): + """Auto-pausing properties of a Big Data pool powered by Apache Spark. + + :param delay_in_minutes: Number of minutes of idle time before the Big Data pool is + automatically paused. + :type delay_in_minutes: int + :param enabled: Whether auto-pausing is enabled for the Big Data pool. + :type enabled: bool + """ + + _attribute_map = { + 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(AutoPauseProperties, self).__init__(**kwargs) + self.delay_in_minutes = kwargs.get('delay_in_minutes', None) + self.enabled = kwargs.get('enabled', None) + + +class AutoScaleProperties(msrest.serialization.Model): + """Auto-scaling properties of a Big Data pool powered by Apache Spark. + + :param min_node_count: The minimum number of nodes the Big Data pool can support. 
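A minimal sketch of the activity above ('name' is required by _validation; the expression value is an illustrative assumption, since any static value or Expression is accepted):

from azure.synapse.artifacts.models import AppendVariableActivity

activity = AppendVariableActivity(
    name="AppendRunId",              # required activity name
    variable_name="processed_ids",   # should name an Array pipeline variable
    value="@pipeline().RunId",       # static value or Expression
)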
+ :type min_node_count: int + :param enabled: Whether automatic scaling is enabled for the Big Data pool. + :type enabled: bool + :param max_node_count: The maximum number of nodes the Big Data pool can support. + :type max_node_count: int + """ + + _attribute_map = { + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(AutoScaleProperties, self).__init__(**kwargs) + self.min_node_count = kwargs.get('min_node_count', None) + self.enabled = kwargs.get('enabled', None) + self.max_node_count = kwargs.get('max_node_count', None) + + +class AvroDataset(Dataset): + """Avro dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType - string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. 
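A sketch of how the two pool-property models might be combined when describing a Spark pool; the enclosing pool model (e.g. a BigDataPoolResourceInfo) is assumed from elsewhere in the package and is not shown in this diff:

from azure.synapse.artifacts.models import AutoPauseProperties, AutoScaleProperties

auto_pause = AutoPauseProperties(enabled=True, delay_in_minutes=15)
auto_scale = AutoScaleProperties(enabled=True, min_node_count=3, max_node_count=10)
# Both would typically be attached to a Big Data pool definition, e.g.
# BigDataPoolResourceInfo(auto_pause=auto_pause, auto_scale=auto_scale, ...).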
This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the avro storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", + "bzip2". 
+ :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } def __init__( self, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricks' - self.domain = kwargs['domain'] - self.access_token = kwargs['access_token'] - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.instance_pool_id = kwargs.get('instance_pool_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - 
self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AvroDataset, self).__init__(**kwargs) + self.type = 'Avro' # type: str + self.location = kwargs.get('location', None) + self.avro_compression_codec = kwargs.get('avro_compression_codec', None) + self.avro_compression_level = kwargs.get('avro_compression_level', None) -class ExecutionActivity(Activity): - """Base class for all execution activities. +class DatasetStorageFormat(msrest.serialization.Model): + """The format definition of a storage. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. + sub-classes are: AvroFormat, JsonFormat, OrcFormat, ParquetFormat, TextFormat. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
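A construction sketch for the Avro dataset just shown, combining it with the AmazonS3Location defined earlier (LinkedServiceReference is assumed to take the referenced service's name, as in other generated models; values are illustrative):

from azure.synapse.artifacts.models import (
    AmazonS3Location,
    AvroDataset,
    LinkedServiceReference,
)

dataset = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyS3LinkedService"),
    location=AmazonS3Location(bucket_name="sample-bucket", file_name="events.avro"),
    avro_compression_codec="deflate",
    avro_compression_level=5,   # validated against the 1..9 range above
)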
+ :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'OrcFormat': 'OrcFormat', 'ParquetFormat': 'ParquetFormat', 'TextFormat': 'TextFormat'} } def __init__( self, **kwargs ): - super(ExecutionActivity, self).__init__(**kwargs) - self.type = 'Execution' - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'DatasetStorageFormat' # type: str + self.serializer = kwargs.get('serializer', None) + self.deserializer = kwargs.get('deserializer', None) -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.type = 'AzureDataExplorerCommand' - self.command = kwargs['command'] - self.command_timeout = kwargs.get('command_timeout', None) + super(AvroFormat, self).__init__(**kwargs) + self.type = 'AvroFormat' # type: str -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class CopySink(msrest.serialization.Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
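AvroFormat adds no fields of its own; constructing it mainly pins the 'AvroFormat' discriminator so the service knows which storage format was meant. The serializer/deserializer values below are illustrative placeholders (both are optional and typed as object):

from azure.synapse.artifacts.models import AvroFormat

fmt = AvroFormat()
assert fmt.type == 'AvroFormat'
fmt_custom = AvroFormat(serializer="custom-writer", deserializer="custom-reader")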
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( self, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataExplorer' - self.endpoint = kwargs['endpoint'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.database = kwargs['database'] - self.tenant = kwargs['tenant'] - - -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. 
+ super(CopySink, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'CopySink' # type: str + self.write_batch_size = kwargs.get('write_batch_size', None) + self.write_batch_timeout = kwargs.get('write_batch_timeout', None) + self.sink_retry_count = kwargs.get('sink_retry_count', None) + self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + + +class AvroSink(CopySink): + """A copy activity Avro sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table: The table name of the Azure Data Explorer database. Type: string (or Expression - with resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Avro format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.AvroWriteSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerTableDataset, self).__init__(**kwargs) - self.type = 'AzureDataExplorerTable' - self.table = kwargs.get('table', None) + super(AvroSink, self).__init__(**kwargs) + self.type = 'AvroSink' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. +class AvroSource(CopySource): + """A copy activity Avro source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. 
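A sketch of an Avro sink; the retry and batch values are illustrative, store_settings would be a concrete StoreWriteSettings subclass defined elsewhere in this module, and format_settings uses the AvroWriteSettings model that appears a little further down in this diff:

from azure.synapse.artifacts.models import AvroSink, AvroWriteSettings

sink = AvroSink(
    write_batch_size=10000,       # Type: integer, minimum 0
    sink_retry_count=3,
    sink_retry_wait="00:00:30",   # matches the hh:mm:ss pattern above
    format_settings=AvroWriteSettings(
        record_name="Event",              # top-level record name per the Avro spec
        record_namespace="com.example",
    ),
)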
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(AvroSource, self).__init__(**kwargs) + self.type = 'AvroSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + + +class FormatWriteSettings(msrest.serialization.Model): + """Format write settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + } + + def __init__( + self, + **kwargs + ): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'FormatWriteSettings' # type: str + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param record_name: Top level record name in write result, which is required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AvroWriteSettings, self).__init__(**kwargs) + self.type = 'AvroWriteSettings' # type: str + self.record_name = kwargs.get('record_name', None) + self.record_namespace = kwargs.get('record_namespace', None) + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. All required parameters must be populated in order to send to Azure. @@ -1507,27 +1685,19 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. 
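And the matching source side, pairing the Avro source with the S3 read settings from earlier (values illustrative):

from azure.synapse.artifacts.models import AmazonS3ReadSettings, AvroSource

source = AvroSource(
    source_retry_count=2,
    source_retry_wait="00:00:10",
    store_settings=AmazonS3ReadSettings(recursive=True, wildcard_file_name="*.avro"),
)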
:type annotations: list[object] - :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or - Expression with resultType string). + :param account_name: Required. The Azure Batch account name. Type: string (or Expression with + resultType string). :type account_name: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Analytics account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group name (if different from - Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.synapse.artifacts.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with resultType string). - :type data_lake_analytics_uri: object + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -1537,7 +1707,9 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): _validation = { 'type': {'required': True}, 'account_name': {'required': True}, - 'tenant': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { @@ -1548,12 +1720,10 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1561,20 +1731,18 @@ def __init__( self, **kwargs ): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeAnalytics' + super(AzureBatchLinkedService, self).__init__(**kwargs) + self.type = 'AzureBatch' # type: str self.account_name = kwargs['account_name'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs['tenant'] - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) - self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) + self.access_key = kwargs.get('access_key', None) + self.batch_uri = kwargs['batch_uri'] + self.pool_name = kwargs['pool_name'] + self.linked_service_name = kwargs['linked_service_name'] self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. All required parameters must be populated in order to send to Azure. @@ -1591,27 +1759,21 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression - with resultType string). - :type data_lake_store_uri: object + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). 
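Note that required properties are read with kwargs['...'] rather than kwargs.get(...), so omitting one fails fast with a KeyError at construction time. A sketch (SecureString is assumed as the concrete SecretBase for the access key; all values are placeholders):

from azure.synapse.artifacts.models import (
    AzureBatchLinkedService,
    LinkedServiceReference,
    SecureString,
)

batch_ls = AzureBatchLinkedService(
    account_name="mybatchaccount",                                  # required
    batch_uri="https://mybatchaccount.westus2.batch.azure.com",     # required
    pool_name="mypool",                                             # required
    linked_service_name=LinkedServiceReference(reference_name="MyStorage"),  # required
    access_key=SecureString(value="<access-key>"),                  # optional
)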
+ :type account_key: object :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Store account. Type: string (or Expression with resultType string). + Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Store account. + Data Lake Storage Gen2 account. :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or Expression with resultType - string). - :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory - account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1620,7 +1782,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -1630,13 +1792,11 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1644,265 +1804,247 @@ def __init__( self, **kwargs ): - super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataLakeStore' - self.data_lake_store_uri = kwargs['data_lake_store_uri'] + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.type = 'AzureBlobFS' # type: str + self.url = kwargs['url'] + self.account_key = kwargs.get('account_key', None) self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) - self.account_name = kwargs.get('account_name', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group_name = kwargs.get('resource_group_name', None) self.encrypted_credential = 
kwargs.get('encrypted_credential', None) -class AzureFileStorageLinkedService(LinkedService): - """Azure File Storage linked service. +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). 
- :type encrypted_credential: object + :type file_system: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureFileStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureFileStorage' - self.host = kwargs['host'] - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.type = 'AzureBlobFSLocation' # type: str + self.file_system = kwargs.get('file_system', None) -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", - "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). - :type function_name: object - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": - "application/json" }. Type: string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT - method, not allowed for GET method Type: string (or Expression with resultType string). 
- :type body: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureFunctionActivity, self).__init__(**kwargs) - self.type = 'AzureFunctionActivity' - self.method = kwargs['method'] - self.function_name = kwargs['function_name'] - self.headers = kwargs.get('headers', None) - self.body = kwargs.get('body', None) + super(AzureBlobFSReadSettings, self).__init__(**kwargs) + self.type = 'AzureBlobFSReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. 
+class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the - format https://:code:``.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'function_app_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureFunctionLinkedService, self).__init__(**kwargs) - self.type = 'AzureFunction' - self.function_app_url = kwargs['function_app_url'] - self.function_key = kwargs.get('function_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureBlobFSSink, self).__init__(**kwargs) + self.type = 'AzureBlobFSSink' # type: str + self.copy_behavior = kwargs.get('copy_behavior', None) -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). 
+ :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ _validation = { 'type': {'required': True}, - 'base_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureKeyVaultLinkedService, self).__init__(**kwargs) - self.type = 'AzureKeyVault' - self.base_url = kwargs['base_url'] + super(AzureBlobFSSource, self).__init__(**kwargs) + self.type = 'AzureBlobFSSource' # type: str + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) -class SecretBase(msrest.serialization.Model): - """The base definition of a secret type. +class StoreWriteSettings(msrest.serialization.Model): + """Connector write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureKeyVaultSecretReference, SecureString. + sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -1910,64 +2052,70 @@ class SecretBase(msrest.serialization.Model): } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } _subtype_map = { - 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( self, **kwargs ): - super(SecretBase, self).__init__(**kwargs) - self.type = None + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'StoreWriteSettings' # type: str + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.synapse.artifacts.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or - Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The default value is the - latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type = 'AzureKeyVaultSecret' - self.store = kwargs['store'] - self.secret_name = kwargs['secret_name'] - self.secret_version = kwargs.get('secret_version', None) + super(AzureBlobFSWriteSettings, self).__init__(**kwargs) + self.type = 'AzureBlobFSWriteSettings' # type: str + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. All required parameters must be populated in order to send to Azure. @@ -1984,18 +2132,36 @@ class AzureMariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. + :param connection_string: The connection string. It is mutually exclusive with sasUri, + serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + connectionString, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Data Warehouse. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. 
+ :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str + """ + + _validation = { 'type': {'required': True}, } @@ -2007,230 +2173,185 @@ class AzureMariaDBLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(**kwargs) - self.type = 'AzureMariaDB' + super(AzureBlobStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureBlobStorage' # type: str self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.service_endpoint = kwargs.get('service_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or Expression with + resultType string). + :type container: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(**kwargs) - self.type = 'AzureMariaDBTable' - self.table_name = kwargs.get('table_name', None) + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.type = 'AzureBlobStorageLocation' # type: str + self.container = kwargs.get('container', None) -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service - endpoint. Keys must match the names of web service parameters defined in the published Azure ML - web service. Values will be passed in the GlobalParameters property of the Azure ML batch - execution request. - :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This - information will be passed in the WebServiceOutputs property of the Azure ML batch execution - request. - :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This - information will be passed in the WebServiceInputs property of the Azure ML batch execution - request. - :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) - self.type = 'AzureMLBatchExecution' - self.global_parameters = kwargs.get('global_parameters', None) - self.web_service_outputs = kwargs.get('web_service_outputs', None) - self.web_service_inputs = kwargs.get('web_service_inputs', None) + super(AzureBlobStorageReadSettings, self).__init__(**kwargs) + self.type = 'AzureBlobStorageReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureMLExecutePipelineActivity(ExecutionActivity): - """Azure ML Execute Pipeline activity. +class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or - Expression with resultType string). - :type ml_pipeline_id: object - :param experiment_name: Run history experiment name of the pipeline run. This information will - be passed in the ExperimentName property of the published pipeline execution request. Type: - string (or Expression with resultType string). - :type experiment_name: object - :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline - endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. - Values will be passed in the ParameterAssignments property of the published pipeline execution - request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: object - :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be - passed in the ParentRunId property of the published pipeline execution request. Type: string - (or Expression with resultType string). - :type ml_parent_run_id: object - :param continue_on_step_failure: Whether to continue execution of other steps in the - PipelineRun if a step fails. This information will be passed in the continueOnStepFailure - property of the published pipeline execution request. Type: boolean (or Expression with - resultType boolean). - :type continue_on_step_failure: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, - 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, - 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, - 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, - 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMLExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'AzureMLExecutePipeline' - self.ml_pipeline_id = kwargs['ml_pipeline_id'] - self.experiment_name = kwargs.get('experiment_name', None) - self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) - self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) - self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) + super(AzureBlobStorageWriteSettings, self).__init__(**kwargs) + self.type = 'AzureBlobStorageWriteSettings' # type: str + self.block_size_in_mb = kwargs.get('block_size_in_mb', None) -class AzureMLLinkedService(LinkedService): - """Azure ML Studio Web Service linked service. +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. All required parameters must be populated in order to send to Azure. @@ -2247,104 +2368,55 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service - endpoint. Type: string (or Expression with resultType string). - :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~azure.synapse.artifacts.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web - Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. 
Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureMLLinkedService, self).__init__(**kwargs) - self.type = 'AzureML' - self.ml_endpoint = kwargs['ml_endpoint'] - self.api_key = kwargs['api_key'] - self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class AzureMLServiceLinkedService(LinkedService): - """Azure ML Service linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or - Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Required. Azure ML Service workspace resource group name. 
Type: - string (or Expression with resultType string). - :type resource_group_name: object - :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :type ml_workspace_name: object - :param service_principal_id: The ID of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). 
+ :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2353,9 +2425,8 @@ class AzureMLServiceLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'subscription_id': {'required': True}, - 'resource_group_name': {'required': True}, - 'ml_workspace_name': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, } _attribute_map = { @@ -2365,12 +2436,19 @@ class AzureMLServiceLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -2378,19 +2456,29 @@ def __init__( self, **kwargs ): - super(AzureMLServiceLinkedService, self).__init__(**kwargs) - self.type = 'AzureMLService' - self.subscription_id = kwargs['subscription_id'] - self.resource_group_name = kwargs['resource_group_name'] - self.ml_workspace_name = kwargs['ml_workspace_name'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricks' # type: 
str + self.domain = kwargs['domain'] + self.access_token = kwargs['access_token'] + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.instance_pool_id = kwargs.get('instance_pool_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. All required parameters must be populated in order to send to Azure. @@ -2411,24 +2499,11 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in the Web Service - experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). 
- :type trained_model_file_path: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, } _attribute_map = { @@ -2440,56 +2515,83 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} } def __init__( self, **kwargs ): - super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) - self.type = 'AzureMLUpdateResource' - self.trained_model_name = kwargs['trained_model_name'] - self.trained_model_linked_service_name = kwargs['trained_model_linked_service_name'] - self.trained_model_file_path = kwargs['trained_model_file_path'] + super(ExecutionActivity, self).__init__(**kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) -class AzureMLWebServiceFile(msrest.serialization.Model): - """Azure ML WebService Input/Output file. +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. All required parameters must be populated in order to send to Azure. - :param file_path: Required. The relative file path, including container name, in the Azure Blob - Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure - ML WebService Input/Output file located. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. 
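The _subtype_map above is what lets msrest turn a generic activity payload into the right subclass; a sketch of that dispatch, assuming msrest's standard discriminator handling and a hypothetical payload:

from azure.synapse.artifacts.models import Activity

# The 'type' discriminator is resolved recursively: Activity -> ExecutionActivity ->
# AzureDataExplorerCommandActivity, per the map above.
payload = {
    "name": "runKustoCommand",                      # hypothetical activity name
    "type": "AzureDataExplorerCommand",
    "typeProperties": {"command": ".show tables"},  # hypothetical control command
}
activity = Activity.deserialize(payload)
print(type(activity).__name__)  # expected: AzureDataExplorerCommandActivity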
+ :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type command_timeout: object """ _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, } _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = kwargs['file_path'] - self.linked_service_name = kwargs['linked_service_name'] + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = kwargs['command'] + self.command_timeout = kwargs.get('command_timeout', None) -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. All required parameters must be populated in order to send to Azure. @@ -2506,22 +2608,33 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:`<clusterName>`.:code:`<regionName>`.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. 
The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, - } - + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, @@ -2529,144 +2642,149 @@ class AzureMySqlLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMySqlLinkedService, self).__init__(**kwargs) - self.type = 'AzureMySql' - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = kwargs['endpoint'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.database = kwargs['database'] + self.tenant = kwargs['tenant'] -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Azure MySQL database table name. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The name of Azure MySQL database table. Type: string (or Expression with - resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description provided in a json + format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. + Type: boolean. 
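All five typeProperties on the new AzureDataExplorerLinkedService are required by its _validation table; a construction sketch with placeholder values (SecureString again assumed as the concrete SecretBase):

from azure.synapse.artifacts.models import AzureDataExplorerLinkedService, SecureString

kusto_ls = AzureDataExplorerLinkedService(
    endpoint="https://mycluster.westus2.kusto.windows.net",       # placeholder cluster URL
    service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder app id
    service_principal_key=SecureString(value="<sp-secret>"),      # SecureString is an assumption
    database="mydb",                                              # placeholder database
    tenant="contoso.onmicrosoft.com",                             # placeholder tenant
)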
+ :type flush_immediately: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureMySqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureMySqlTable' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) + super(AzureDataExplorerSink, self).__init__(**kwargs) + self.type = 'AzureDataExplorerSink' # type: str + self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) + self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) + self.flush_immediately = kwargs.get('flush_immediately', None) -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
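A quick sketch of how the sink's snake_case attributes map back to the service's camelCase keys through the _attribute_map above; the printed shape is approximate:

from azure.synapse.artifacts.models import AzureDataExplorerSink

sink = AzureDataExplorerSink(
    ingestion_mapping_name="csv_mapping_1",  # placeholder pre-created mapping name
    flush_immediately=True,
)
# Roughly: {'type': 'AzureDataExplorerSink', 'ingestionMappingName': 'csv_mapping_1',
#           'flushImmediately': True}
print(sink.serialize())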
- :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: + string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether truncation is + applied to result-sets that go beyond a certain row-count limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + :type query_timeout: object """ _validation = { 'type': {'required': True}, + 'query': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'AzurePostgreSql' - self.connection_string = kwargs.get('connection_string', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.type = 'AzureDataExplorerSource' # type: str + self.query = kwargs['query'] + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. All required parameters must be populated in order to send to Azure. @@ -2692,15 +2810,9 @@ class AzurePostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Azure PostgreSQL database which includes both schema - and table. Type: string (or Expression with resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. 
Type: string (or Expression with - resultType string). + :param table: The table name of the Azure Data Explorer database. Type: string (or Expression + with resultType string). :type table: object - :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -2718,84 +2830,104 @@ class AzurePostgreSqlTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzurePostgreSqlTable' - self.table_name = kwargs.get('table_name', None) + super(AzureDataExplorerTableDataset, self).__init__(**kwargs) + self.type = 'AzureDataExplorerTable' # type: str self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression - with resultType string). - :type index_name: object + :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or + Expression with resultType string). 
+ :type account_name: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Analytics account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Analytics account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + :param subscription_id: Data Lake Analytics account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Analytics account resource group name (if different from + Data Factory account). Type: string (or Expression with resultType string). + :type resource_group_name: object + :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with + resultType string). + :type data_lake_analytics_uri: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index_name': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSearchIndexDataset, self).__init__(**kwargs) - self.type = 'AzureSearchIndex' - self.index_name = kwargs['index_name'] + super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataLakeAnalytics' # type: str + self.account_name = kwargs['account_name'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs['tenant'] + 
self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. All required parameters must be populated in order to send to Azure. @@ -2812,11 +2944,27 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression + with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Store account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Store account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). - :type url: object - :param key: Admin Key for Azure Search service. - :type key: ~azure.synapse.artifacts.models.SecretBase + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory + account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type resource_group_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
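Per the validation table above, only account_name and tenant are mandatory for the new AzureDataLakeAnalyticsLinkedService; a sketch with placeholder values:

from azure.synapse.artifacts.models import AzureDataLakeAnalyticsLinkedService

adla_ls = AzureDataLakeAnalyticsLinkedService(
    account_name="myadlaaccount",                   # placeholder ADLA account
    tenant="72f988bf-0000-0000-0000-000000000000",  # placeholder tenant id
    resource_group_name="my-resource-group",        # optional: only if it differs from the factory's
)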
@@ -2825,7 +2973,7 @@ class AzureSearchLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'url': {'required': True}, + 'data_lake_store_uri': {'required': True}, } _attribute_map = { @@ -2835,8 +2983,13 @@ class AzureSearchLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -2844,438 +2997,334 @@ def __init__( self, **kwargs ): - super(AzureSearchLinkedService, self).__init__(**kwargs) - self.type = 'AzureSearch' - self.url = kwargs['url'] - self.key = kwargs.get('key', None) + super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataLakeStore' # type: str + self.data_lake_store_uri = kwargs['data_lake_store_uri'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.account_name = kwargs.get('account_name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSqlDatabaseLinkedService(LinkedService): - """Microsoft Azure SQL Database linked service. +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. 
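The replacement AzureDataLakeStoreLinkedService requires nothing beyond data_lake_store_uri; a minimal sketch with a placeholder URI:

from azure.synapse.artifacts.models import AzureDataLakeStoreLinkedService

adls_ls = AzureDataLakeStoreLinkedService(
    data_lake_store_uri="adl://myadlsaccount.azuredatalakestore.net/",  # placeholder
)
# service_principal_id/key, tenant, account_name, subscription_id, and
# resource_group_name all default to None in the __init__ above.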
- :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Database. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Database. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDatabase' - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataLakeStoreLocation, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreLocation' # type: str -class AzureSqlDWLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlDWLinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlDW' - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataLakeStoreReadSettings, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. 
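A sketch of the new read settings; note that enable_partition_discovery is a plain bool in the _attribute_map above, while the other knobs are Expression-capable objects (paths are placeholders):

from azure.synapse.artifacts.models import AzureDataLakeStoreReadSettings

read_settings = AzureDataLakeStoreReadSettings(
    recursive=True,
    wildcard_folder_path="landing/2020/*",  # placeholder folder pattern
    wildcard_file_name="*.parquet",         # placeholder file pattern
    enable_partition_discovery=True,
)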
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with - resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
+ :type enable_adls_single_file_parallel: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlDWTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlDWTable' - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(AzureDataLakeStoreSink, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreSink' # type: str + self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Managed Instance. Type: string (or Expression with resultType string). 
- :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Managed Instance. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlMILinkedService, self).__init__(**kwargs) - self.type = 'AzureSqlMI' - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDataLakeStoreSource, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreSource' # type: str + self.recursive = kwargs.get('recursive', None) -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. All required parameters must be populated in order to send to Azure. 
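A matching source/sink pair as they might appear inside a copy activity; the 'PreserveHierarchy' copy behavior is a service-side value rather than something this diff defines, so treat it as an assumption:

from azure.synapse.artifacts.models import (
    AzureDataLakeStoreSink,
    AzureDataLakeStoreSource,
)

source = AzureDataLakeStoreSource(recursive=True)
sink = AzureDataLakeStoreSink(copy_behavior="PreserveHierarchy")  # assumed service value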
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or - Expression with resultType string). - :type table: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlMITableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlMITable' - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' # type: str -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. +class Resource(msrest.serialization.Model): + """Resource. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string - (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL database. Type: string (or Expression with - resultType string). - :type table: object + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureSqlTableDataset, self).__init__(**kwargs) - self.type = 'AzureSqlTable' - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. +class AzureEntityResource(Resource): + """The resource model definition for a Azure Resource Manager resource with an etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None + + +class AzureFileStorageLinkedService(LinkedService): + """Azure File Storage linked service. All required parameters must be populated in order to send to Azure. @@ -3292,24 +3341,23 @@ class AzureStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: The connection string. 
It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { @@ -3319,118 +3367,91 @@ class AzureStorageLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureStorage' - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) + super(AzureFileStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureFileStorage' # type: str + self.host = kwargs['host'] + self.user_id = kwargs.get('user_id', None) + self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. +class AzureFileStorageLocation(DatasetLocation): + """The location of file server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. 
Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. The table name of the Azure Table storage. Type: string (or - Expression with resultType string). - :type table_name: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureTableDataset, self).__init__(**kwargs) - self.type = 'AzureTable' - self.table_name = kwargs['table_name'] + super(AzureFileStorageLocation, self).__init__(**kwargs) + self.type = 'AzureFileStorageLocation' # type: str -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. +class AzureFileStorageReadSettings(StoreReadSettings): + """Azure File Storage read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression + with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { @@ -3440,127 +3461,102 @@ class AzureTableStorageLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureTableStorageLinkedService, self).__init__(**kwargs) - self.type = 'AzureTableStorage' - self.connection_string = kwargs.get('connection_string', None) - self.account_key = kwargs.get('account_key', None) - self.sas_uri = kwargs.get('sas_uri', None) - self.sas_token = kwargs.get('sas_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - - -class BigDataPoolReference(msrest.serialization.Model): - """Big data pool reference. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Big data pool reference type. Default value: "BigDataPoolReference". - :vartype type: str - :param reference_name: Required. Reference big data pool name. - :type reference_name: str - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - type = "BigDataPoolReference" - def __init__( self, **kwargs ): - super(BigDataPoolReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] + super(AzureFileStorageReadSettings, self).__init__(**kwargs) + self.type = 'AzureFileStorageReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class BinaryDataset(Dataset): - """Binary dataset. +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. All required parameters must be populated in order to send to Azure. 
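The regenerated models keep the kwargs-only constructor pattern: required values are read with kwargs['...'], optional ones with kwargs.get(...), and the 'type' discriminator is pinned inside __init__ rather than taken from the caller. A minimal sketch of building the AzureFunctionActivity introduced here, assuming only the keyword arguments this diff shows; the activity and function names are placeholders:

    from azure.synapse.artifacts.models import AzureFunctionActivity

    activity = AzureFunctionActivity(
        name="CallMyFunction",            # required: activity name
        method="POST",                    # required: REST method for the target endpoint
        function_name="HttpTriggerFunc",  # required: function to call (hypothetical name)
        headers={"Content-Type": "application/json"},
        body={"payload": "example"},      # required for POST/PUT, not allowed for GET
    )
    assert activity.type == "AzureFunctionActivity"  # discriminator set by __init__
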
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the Binary storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression: The data compression method used for the binary dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod + :param function_name: Required. Name of the Function that the Azure Function Activity will + call. Type: string (or Expression with resultType string). + :type function_name: object + :param headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). 
+ :type body: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, } def __init__( self, **kwargs ): - super(BinaryDataset, self).__init__(**kwargs) - self.type = 'Binary' - self.location = kwargs.get('location', None) - self.compression = kwargs.get('compression', None) + super(AzureFunctionActivity, self).__init__(**kwargs) + self.type = 'AzureFunctionActivity' # type: str + self.method = kwargs['method'] + self.function_name = kwargs['function_name'] + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. All required parameters must be populated in order to send to Azure. @@ -3577,20 +3573,11 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name for connection. Type: string (or Expression with resultType - string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression with resultType - integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + format https://:code:``.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). @@ -3599,7 +3586,7 @@ class CassandraLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'function_app_url': {'required': True}, } _attribute_map = { @@ -3609,11 +3596,8 @@ class CassandraLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -3621,179 +3605,135 @@ def __init__( self, **kwargs ): - super(CassandraLinkedService, self).__init__(**kwargs) - self.type = 'Cassandra' - self.host = kwargs['host'] - self.authentication_type = kwargs.get('authentication_type', None) - self.port = kwargs.get('port', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) + super(AzureFunctionLinkedService, self).__init__(**kwargs) + self.type = 'AzureFunction' # type: str + self.function_app_url = kwargs['function_app_url'] + self.function_key = kwargs.get('function_key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Cassandra database. Type: string (or Expression with - resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with - resultType string). - :type keyspace: object + :param base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net Type: string (or Expression with resultType string). + :type base_url: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'base_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CassandraTableDataset, self).__init__(**kwargs) - self.type = 'CassandraTable' - self.table_name = kwargs.get('table_name', None) - self.keyspace = kwargs.get('keyspace', None) + super(AzureKeyVaultLinkedService, self).__init__(**kwargs) + self.type = 'AzureKeyVault' # type: str + self.base_url = kwargs['base_url'] -class CloudError(msrest.serialization.Model): - """The object that defines the structure of an Azure Synapse error response. +class SecretBase(msrest.serialization.Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureKeyVaultSecretReference, SecureString. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.synapse.artifacts.models.CloudError] + :param type: Required. Type of the secret.Constant filled by server. 
+ :type type: str """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} } def __init__( self, **kwargs ): - super(CloudError, self).__init__(**kwargs) - self.code = kwargs['code'] - self.message = kwargs['message'] - self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) + super(SecretBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of the secret.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.synapse.artifacts.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or + Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The default value is the + latest version of the secret. Type: string (or Expression with resultType string). 
+ :type secret_version: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) - self.type = 'CommonDataServiceForAppsEntity' - self.entity_name = kwargs.get('entity_name', None) + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.type = 'AzureKeyVaultSecret' # type: str + self.store = kwargs['store'] + self.secret_name = kwargs['secret_name'] + self.secret_version = kwargs.get('secret_version', None) -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. @@ -3810,54 +3750,11 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Common Data Service for Apps - instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common - Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for Apps server. The - property is required for on-prem and not allowed for online. Type: string (or Expression with - resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. The property is - required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression - with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property - is required for on-line and not allowed for on-prem. Type: string (or Expression with - resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service for Apps instance. - The property is required for on-prem and required for online when there are more than one - Common Data Service for Apps instances associated with the user. Type: string (or Expression - with resultType string). 
- :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Common Data Service - for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Common Data Service for Apps instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
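The SecretBase hierarchy above is what makes fields like pwd polymorphic: _subtype_map maps the 'type' discriminator ('AzureKeyVaultSecret', 'SecureString') to the concrete class, so the correct subclass round-trips through serialization. A short sketch under that assumption, wiring the Azure Database for MariaDB linked service to a Key Vault secret; it assumes LinkedServiceReference accepts a reference_name, as it does elsewhere in these models, and all names are placeholders:

    from azure.synapse.artifacts.models import (
        AzureKeyVaultSecretReference,
        AzureMariaDBLinkedService,
        LinkedServiceReference,
    )

    akv_secret = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="MyAkvLinkedService"),  # required
        secret_name="mariadb-password",  # required
        # secret_version omitted -> the latest version of the secret is used
    )

    mariadb_ls = AzureMariaDBLinkedService(
        connection_string="Server=myserver;Port=3306;Database=mydb;Uid=myuser;",  # ODBC string (placeholder)
        pwd=akv_secret,  # serialized with type='AzureKeyVaultSecret' via the subtype map
    )
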
@@ -3866,8 +3763,6 @@ class CommonDataServiceForAppsLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { @@ -3877,17 +3772,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -3895,102 +3781,65 @@ def __init__( self, **kwargs ): - super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) - self.type = 'CommonDataServiceForApps' - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) + super(AzureMariaDBLinkedService, self).__init__(**kwargs) + self.type = 'AzureMariaDB' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class ConcurLinkedService(LinkedService): - """Concur Service linked service. +class AzureMariaDBSource(TabularSource): + """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. 
- :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ConcurLinkedService, self).__init__(**kwargs) - self.type = 'Concur' - self.client_id = kwargs['client_id'] - self.username = kwargs['username'] - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureMariaDBSource, self).__init__(**kwargs) + self.type = 'AzureMariaDBSource' # type: str + self.query = kwargs.get('query', None) -class ConcurObjectDataset(Dataset): - """Concur Service dataset. +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. @@ -4042,13 +3891,13 @@ def __init__( self, **kwargs ): - super(ConcurObjectDataset, self).__init__(**kwargs) - self.type = 'ConcurObject' + super(AzureMariaDBTableDataset, self).__init__(**kwargs) + self.type = 'AzureMariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. All required parameters must be populated in order to send to Azure. @@ -4065,6 +3914,25 @@ class ControlActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service + endpoint. 
Keys must match the names of web service parameters defined in the published Azure ML + web service. Values will be passed in the GlobalParameters property of the Azure ML batch + execution request. + :type global_parameters: dict[str, object] + :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This + information will be passed in the WebServiceOutputs property of the Azure ML batch execution + request. + :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This + information will be passed in the WebServiceInputs property of the Azure ML batch execution + request. + :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] """ _validation = { @@ -4079,18 +3947,26 @@ class ControlActivity(Activity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - } - - def __init__( + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + } + + def __init__( self, **kwargs ): - super(ControlActivity, self).__init__(**kwargs) - self.type = 'Container' + super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) + self.type = 'AzureMLBatchExecution' # type: str + self.global_parameters = kwargs.get('global_parameters', None) + self.web_service_outputs = kwargs.get('web_service_outputs', None) + self.web_service_inputs = kwargs.get('web_service_inputs', None) -class CopyActivity(ExecutionActivity): - """Copy activity. +class AzureMLExecutePipelineActivity(ExecutionActivity): + """Azure ML Execute Pipeline activity. All required parameters must be populated in order to send to Azure. @@ -4111,46 +3987,33 @@ class CopyActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param source: Required. Copy activity source. - :type source: ~azure.synapse.artifacts.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.synapse.artifacts.models.CopySink - :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim staging. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when EnableStaging is true. 
- :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to - avoid overloading the data store. Type: integer (or Expression with resultType integer), - minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units that can be used to - perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. - Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row settings when - EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] + :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or + Expression with resultType string). + :type ml_pipeline_id: object + :param experiment_name: Run history experiment name of the pipeline run. This information will + be passed in the ExperimentName property of the published pipeline execution request. Type: + string (or Expression with resultType string). + :type experiment_name: object + :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline + endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. + Values will be passed in the ParameterAssignments property of the published pipeline execution + request. Type: object with key value pairs (or Expression with resultType object). + :type ml_pipeline_parameters: object + :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be + passed in the ParentRunId property of the published pipeline execution request. Type: string + (or Expression with resultType string). + :type ml_parent_run_id: object + :param continue_on_step_failure: Whether to continue execution of other steps in the + PipelineRun if a step fails. This information will be passed in the continueOnStepFailure + property of the published pipeline execution request. Type: boolean (or Expression with + resultType boolean). 
+ :type continue_on_step_failure: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, + 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -4162,284 +4025,287 @@ class CopyActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, + 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, + 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CopyActivity, self).__init__(**kwargs) - self.type = 'Copy' - self.inputs = kwargs.get('inputs', None) - self.outputs = kwargs.get('outputs', None) - self.source = kwargs['source'] - self.sink = kwargs['sink'] - self.translator = kwargs.get('translator', None) - self.enable_staging = kwargs.get('enable_staging', None) - self.staging_settings = kwargs.get('staging_settings', None) - self.parallel_copies = kwargs.get('parallel_copies', None) - self.data_integration_units = kwargs.get('data_integration_units', None) - self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) - self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) - self.preserve_rules = kwargs.get('preserve_rules', None) - self.preserve = kwargs.get('preserve', None) - + super(AzureMLExecutePipelineActivity, self).__init__(**kwargs) + self.type = 'AzureMLExecutePipeline' # type: str + self.ml_pipeline_id = kwargs['ml_pipeline_id'] + self.experiment_name = kwargs.get('experiment_name', None) + self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) + self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) + self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) -class CopySink(msrest.serialization.Model): - """A copy activity sink. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . 
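AzureMLExecutePipelineActivity, completed just above, follows the same pattern; only name and ml_pipeline_id are validated as required. A minimal sketch with placeholder identifiers:

    from azure.synapse.artifacts.models import AzureMLExecutePipelineActivity

    run_step = AzureMLExecutePipelineActivity(
        name="RunScoringPipeline",  # required: activity name
        ml_pipeline_id="00000000-0000-0000-0000-000000000000",  # required: published pipeline id (placeholder)
        experiment_name="nightly-scoring",  # optional: run-history experiment name
        continue_on_step_failure=True,  # keep executing later steps if one fails
    )
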
+class AzureMLLinkedService(LinkedService): + """Azure ML Studio Web Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model endpoint. + :type api_key: ~azure.synapse.artifacts.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web + Service endpoint. Type: string (or Expression with resultType string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySink' - self.write_batch_size = kwargs.get('write_batch_size', None) - self.write_batch_timeout = kwargs.get('write_batch_timeout', None) - self.sink_retry_count = kwargs.get('sink_retry_count', None) - self.sink_retry_wait = kwargs.get('sink_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - + super(AzureMLLinkedService, self).__init__(**kwargs) + self.type = 'AzureML' # type: str + self.ml_endpoint = kwargs['ml_endpoint'] + self.api_key = kwargs['api_key'] + self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CopySource(msrest.serialization.Model): - """A copy activity source. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . +class AzureMLServiceLinkedService(LinkedService): + """Azure ML Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
- :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or + Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: + string (or Expression with resultType string). + :type resource_group_name: object + :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or + Expression with resultType string). + :type ml_workspace_name: object + :param service_principal_id: The ID of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'subscription_id': {'required': True}, + 'resource_group_name': {'required': True}, + 'ml_workspace_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'CopySource' - self.source_retry_count = kwargs.get('source_retry_count', None) - self.source_retry_wait = kwargs.get('source_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + super(AzureMLServiceLinkedService, self).__init__(**kwargs) + self.type = 'AzureMLService' # type: str + self.subscription_id = kwargs['subscription_id'] + self.resource_group_name = kwargs['resource_group_name'] + self.ml_workspace_name = kwargs['ml_workspace_name'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. 
- :type annotations: list[object] - :param connection_string: The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or - Expression with resultType string). - :type account_endpoint: object - :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: object - :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or - AzureKeyVaultSecretReference. - :type account_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param trained_model_name: Required. Name of the Trained Model module in the Web Service + experiment to be updated. Type: string (or Expression with resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage linked service + holding the .ilearner file that will be uploaded by the update operation. + :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService + to represent the .ilearner file that will be uploaded by the update operation. Type: string + (or Expression with resultType string). 
+ :type trained_model_file_path: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CosmosDbLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDb' - self.connection_string = kwargs.get('connection_string', None) - self.account_endpoint = kwargs.get('account_endpoint', None) - self.database = kwargs.get('database', None) - self.account_key = kwargs.get('account_key', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) + self.type = 'AzureMLUpdateResource' # type: str + self.trained_model_name = kwargs['trained_model_name'] + self.trained_model_linked_service_name = kwargs['trained_model_linked_service_name'] + self.trained_model_file_path = kwargs['trained_model_file_path'] -class CosmosDbMongoDbApiCollectionDataset(Dataset): - """The CosmosDB (MongoDB API) database dataset. +class AzureMLWebServiceFile(msrest.serialization.Model): + """Azure ML WebService Input/Output file. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param file_path: Required. 
The relative file path, including container name, in the Azure Blob
+ Storage specified by the LinkedService. Type: string (or Expression with resultType string).
+ :type file_path: object
+ :param linked_service_name: Required. Reference to an Azure Storage LinkedService where the
+ Azure ML WebService Input/Output file is located.
 :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
- root level.
- :type folder: ~azure.synapse.artifacts.models.DatasetFolder
- :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type:
- string (or Expression with resultType string).
- :type collection: object
 """

 _validation = {
- 'type': {'required': True},
+ 'file_path': {'required': True},
 'linked_service_name': {'required': True},
- 'collection': {'required': True},
 }

 _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'description': {'key': 'description', 'type': 'str'},
- 'structure': {'key': 'structure', 'type': 'object'},
- 'schema': {'key': 'schema', 'type': 'object'},
+ 'file_path': {'key': 'filePath', 'type': 'object'},
 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
- 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
- 'annotations': {'key': 'annotations', 'type': '[object]'},
- 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
- 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
 }

 def __init__(
 self,
 **kwargs
 ):
- super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs)
- self.type = 'CosmosDbMongoDbApiCollection'
- self.collection = kwargs['collection']
+ super(AzureMLWebServiceFile, self).__init__(**kwargs)
+ self.file_path = kwargs['file_path']
+ self.linked_service_name = kwargs['linked_service_name']


-class CosmosDbMongoDbApiLinkedService(LinkedService):
- """Linked service for CosmosDB (MongoDB API) data source.
+class AzureMySqlLinkedService(LinkedService):
+ """Azure MySQL database linked service.

 All required parameters must be populated in order to send to Azure.

@@ -4456,19 +4322,20 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
 :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
 :type annotations: list[object]
- :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string,
- SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or
+ :param connection_string: Required. The connection string. Type: string, SecureString or
 AzureKeyVaultSecretReference.
 :type connection_string: object
- :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to
- access. Type: string (or Expression with resultType string).
- :type database: object
+ :param password: The Azure key vault secret reference of password in connection string.
+ :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for authentication.
Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, - 'database': {'required': True}, } _attribute_map = { @@ -4479,106 +4346,99 @@ class CosmosDbMongoDbApiLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) - self.type = 'CosmosDbMongoDbApi' + super(AzureMySqlLinkedService, self).__init__(**kwargs) + self.type = 'AzureMySql' # type: str self.connection_string = kwargs['connection_string'] - self.database = kwargs['database'] + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CosmosDbSqlApiCollectionDataset(Dataset): - """Microsoft Azure CosmosDB (SQL API) Collection dataset. +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). - :type collection_name: object + :type pre_copy_script: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) - self.type = 'CosmosDbSqlApiCollection' - self.collection_name = kwargs['collection_name'] + super(AzureMySqlSink, self).__init__(**kwargs) + self.type = 'AzureMySqlSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class CouchbaseLinkedService(LinkedService): - """Couchbase server linked service. +class AzureMySqlSource(TabularSource): + """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object """ _validation = { @@ -4588,28 +4448,24 @@ class CouchbaseLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CouchbaseLinkedService, self).__init__(**kwargs) - self.type = 'Couchbase' - self.connection_string = kwargs.get('connection_string', None) - self.cred_string = kwargs.get('cred_string', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureMySqlSource, self).__init__(**kwargs) + self.type = 'AzureMySqlSource' # type: str + self.query = kwargs.get('query', None) -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. @@ -4635,8 +4491,12 @@ class CouchbaseTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: The Azure MySQL database table name. Type: string (or Expression with + resultType string). :type table_name: object + :param table: The name of Azure MySQL database table. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { @@ -4655,1232 +4515,1630 @@ class CouchbaseTableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CouchbaseTableDataset, self).__init__(**kwargs) - self.type = 'CouchbaseTable' + super(AzureMySqlTableDataset, self).__init__(**kwargs) + self.type = 'AzureMySqlTable' # type: str self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) -class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for creating data flow debug session. +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param existing_cluster_id: The ID of existing Databricks cluster. - :type existing_cluster_id: str - :param cluster_timeout: Timeout setting for Databricks cluster. - :type cluster_timeout: int - :param new_cluster_name: The name of new Databricks cluster. - :type new_cluster_name: str - :param new_cluster_node_type: The type of new Databricks cluster. - :type new_cluster_node_type: str - :param data_bricks_linked_service: Data bricks linked service. - :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, - 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, - 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, - 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, - 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.cluster_timeout = kwargs.get('cluster_timeout', None) - self.new_cluster_name = kwargs.get('new_cluster_name', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.data_bricks_linked_service = kwargs.get('data_bricks_linked_service', None) + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) + self.type = 'AzurePostgreSql' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): - """Response body structure for creating data flow debug session. +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. - :param session_id: The ID of data flow debug session. - :type session_id: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. 
Type: string (or + Expression with resultType string). + :type pre_copy_script: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) + super(AzurePostgreSqlSink, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class CreateRunResponse(msrest.serialization.Model): - """Response body with a run identifier. +class AzurePostgreSqlSource(TabularSource): + """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. - :param run_id: Required. Identifier of a run. - :type run_id: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { - 'run_id': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = kwargs['run_id'] + super(AzurePostgreSqlSource, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlSource' # type: str + self.query = kwargs.get('query', None) -class CustomActivity(ExecutionActivity): - """Custom activity type. +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or Expression with + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name of the Azure PostgreSQL database which includes both schema + and table. Type: string (or Expression with resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). - :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_objects: Reference objects. - :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no restriction on the keys or - values that can be used. The user specified custom activity has the full responsibility to - consume and interpret the content defined. - :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted for custom activity. - Type: double (or Expression with resultType double). - :type retention_time_in_days: object + :type table: object + :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). 
+ :type schema_type_properties_schema: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, - } - + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + def __init__( self, **kwargs ): - super(CustomActivity, self).__init__(**kwargs) - self.type = 'Custom' - self.command = kwargs['command'] - self.resource_linked_service = kwargs.get('resource_linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_objects = kwargs.get('reference_objects', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) + self.type = 'AzurePostgreSqlTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class CustomActivityReferenceObject(msrest.serialization.Model): - """Reference objects for custom activity. +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. - :param linked_services: Linked service references. - :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = kwargs.get('linked_services', None) - self.datasets = kwargs.get('datasets', None) + super(AzureQueueSink, self).__init__(**kwargs) + self.type = 'AzureQueueSink' # type: str -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param type_properties: Required. Custom linked service properties. - :type type_properties: object + :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression + with resultType string). + :type index_name: object """ _validation = { 'type': {'required': True}, - 'type_properties': {'required': True}, + 'linked_service_name': {'required': True}, + 'index_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(CustomDataSourceLinkedService, self).__init__(**kwargs) - self.type = 'CustomDataSource' - self.type_properties = kwargs['type_properties'] + super(AzureSearchIndexDataset, self).__init__(**kwargs) + self.type = 'AzureSearchIndex' # type: str + self.index_name = kwargs['index_name'] -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks - Workspace. This path must begin with a slash. Type: string (or Expression with resultType - string). - :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this job.If the notebook - takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Specify the write behavior when upserting documents into Azure Search + Index. Possible values include: "Merge", "Upload". + :type write_behavior: str or ~azure.synapse.artifacts.models.AzureSearchIndexWriteBehaviorType """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'notebook_path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DatabricksNotebookActivity, self).__init__(**kwargs) - self.type = 'DatabricksNotebook' - self.notebook_path = kwargs['notebook_path'] - self.base_parameters = kwargs.get('base_parameters', None) - self.libraries = kwargs.get('libraries', None) + super(AzureSearchIndexSink, self).__init__(**kwargs) + self.type = 'AzureSearchIndexSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. 
:type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing the main method to be - executed. This class must be contained in a JAR provided as a library. Type: string (or - Expression with resultType string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType + string). + :type url: object + :param key: Admin Key for Azure Search service. + :type key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'main_class_name': {'required': True}, + 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkJar' - self.main_class_name = kwargs['main_class_name'] - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) + super(AzureSearchLinkedService, self).__init__(**kwargs) + self.type = 'AzureSearch' # type: str + self.url = kwargs['url'] + self.key = kwargs.get('key', None) + self.encrypted_credential = 
kwargs.get('encrypted_credential', None) -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. DBFS paths are - supported. Type: string (or Expression with resultType string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Database. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Database. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
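For the AzureSearchLinkedService completed just above, a sketch assuming SecureString is available in the same models package as a SecretBase implementation; the endpoint and key are placeholders.

from azure.synapse.artifacts.models import AzureSearchLinkedService, SecureString

search_ls = AzureSearchLinkedService(
    url="https://myservice.search.windows.net",  # required
    key=SecureString(value="<admin-key>"),       # any SecretBase works for 'key'
)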
+ :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'python_file': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.type = 'DatabricksSparkPython' - self.python_file = kwargs['python_file'] - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlDatabase' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DataFlow(msrest.serialization.Model): - """Azure Synapse nested object which contains a flow with data movements and transformations. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MappingDataFlow. +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: The description of the data flow. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param annotations: List of tags that can be used for describing the data flow. + :param parameters: Parameters for linked service. 
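Only connection_string is required by the AzureSqlDatabaseLinkedService above; a sketch that also wires the password to Key Vault, with all names as placeholders.

from azure.synapse.artifacts.models import (
    AzureKeyVaultSecretReference,
    AzureSqlDatabaseLinkedService,
    LinkedServiceReference,
)

sql_ls = AzureSqlDatabaseLinkedService(
    connection_string="Server=tcp:myserver.database.windows.net;Database=mydb;",
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(
            type="LinkedServiceReference",  # assumed kwarg in this generation
            reference_name="MyKeyVaultLinkedService",
        ),
        secret_name="sql-password",
    ),
)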
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.DataFlowFolder + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Data Warehouse. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - } - - _subtype_map = { - 'type': {'MappingDataFlow': 'MappingDataFlow'} + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlow, self).__init__(**kwargs) - self.type = None - self.description = kwargs.get('description', None) - self.annotations = kwargs.get('annotations', None) - self.folder = kwargs.get('folder', None) + super(AzureSqlDWLinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlDW' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DataFlowDebugCommandRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. 
+class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. - :param session_id: Required. The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param command_name: The command name. - :type command_name: str - :param command_payload: Required. The command payload object. - :type command_payload: object + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { - 'session_id': {'required': True}, - 'command_payload': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'command_name': {'key': 'commandName', 'type': 'str'}, - 'command_payload': {'key': 'commandPayload', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugCommandRequest, self).__init__(**kwargs) - self.session_id = kwargs['session_id'] - self.data_flow_name = kwargs.get('data_flow_name', None) - self.command_name = kwargs.get('command_name', None) - self.command_payload = kwargs['command_payload'] - - -class DataFlowDebugCommandResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. - - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, - } + super(AzureSqlDWTableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlDWTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) - def __init__( - self, - **kwargs - ): - super(DataFlowDebugCommandResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.data = kwargs.get('data', None) +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. -class DataFlowDebugPackage(msrest.serialization.Model): - """Request body structure for starting data flow debug session. + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow: Data flow instance. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource - :param datasets: List of datasets. - :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] - :param staging: Staging info for debug session. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param debug_settings: Data flow debug settings. 
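A sketch for the AzureSqlDWTableDataset above; the docstring flags table_name for retirement, so the schema/table pair is used instead. Names are placeholders.

from azure.synapse.artifacts.models import AzureSqlDWTableDataset, LinkedServiceReference

dw_table = AzureSqlDWTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="MyAzureSqlDWLinkedService",
    ),
    schema_type_properties_schema="dbo",  # serialized as typeProperties.schema
    table="FactSales",                    # serialized as typeProperties.table
)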
- :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugPackage, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.session_id = kwargs.get('session_id', None) - self.data_flow = kwargs.get('data_flow', None) - self.datasets = kwargs.get('datasets', None) - self.linked_services = kwargs.get('linked_services', None) - self.staging = kwargs.get('staging', None) - self.debug_settings = kwargs.get('debug_settings', None) - - -class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): - """Data flow debug settings. - - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Managed Instance. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Managed Instance. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ - _attribute_map = { - 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, } - def __init__( - self, - **kwargs - ): - super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) - self.source_settings = kwargs.get('source_settings', None) - self.parameters = kwargs.get('parameters', None) - self.dataset_parameters = kwargs.get('dataset_parameters', None) - - -class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): - """Request body structure for data flow preview data. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request. - :type row_limits: int - """ - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.stream_name = kwargs.get('stream_name', None) - self.row_limits = kwargs.get('row_limits', None) + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.type = 'AzureSqlMI' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DataFlowDebugQueryResponse(msrest.serialization.Model): - """Response body structure of data flow query for data preview, statistics or expression preview. +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. - :param run_id: The run ID of data flow debug session. - :type run_id: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
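AzureSqlMILinkedService mirrors the other SQL linked services; a service-principal sketch with placeholder identifiers follows.

from azure.synapse.artifacts.models import AzureSqlMILinkedService, SecureString

mi_ls = AzureSqlMILinkedService(
    connection_string="Server=myserver.public.dns;Database=mydb;",  # required
    service_principal_id="00000000-0000-0000-0000-000000000000",    # placeholder
    service_principal_key=SecureString(value="<client-secret>"),
    tenant="contoso.onmicrosoft.com",
)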
+ :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + Expression with resultType string). + :type table: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugQueryResponse, self).__init__(**kwargs) - self.run_id = kwargs.get('run_id', None) + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlMITable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class SubResourceDebugResource(msrest.serialization.Model): - """Azure Synapse nested debug resource. +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. - :param name: The resource name. - :type name: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. 
+ :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
+ :type table_option: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SubResourceDebugResource, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + super(AzureSqlSink, self).__init__(**kwargs) + self.type = 'AzureSqlSink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) -class DataFlowDebugResource(SubResourceDebugResource): - """Data flow debug resource. +class AzureSqlSource(TabularSource): + """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). 
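A sketch of the AzureSqlSink above, combining a stored-procedure write with the autoCreate table option; procedure and type names are placeholders.

from azure.synapse.artifacts.models import AzureSqlSink

sql_sink = AzureSqlSink(
    sql_writer_stored_procedure_name="spOverwriteTable",
    sql_writer_table_type="MyTableType",
    stored_procedure_table_type_parameter_name="MyTable",
    table_option="autoCreate",  # the only value the docstring says is supported
)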
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object """ _validation = { - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(AzureSqlSource, self).__init__(**kwargs) + self.type = 'AzureSqlSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) -class DataFlowDebugResultResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowDebugResultResponse, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.data = kwargs.get('data', None) - - -class DataFlowDebugSessionInfo(msrest.serialization.Model): - """Data flow debug session info. + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param compute_type: Compute type of the cluster. - :type compute_type: str - :param core_count: Core count of the cluster. - :type core_count: int - :param node_count: Node count of the cluster. (deprecated property). - :type node_count: int - :param integration_runtime_name: Attached integration runtime name of data flow debug session. - :type integration_runtime_name: str - :param session_id: The ID of data flow debug session. 
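For the AzureSqlSource above, a stored-procedure read cannot be combined with sql_reader_query; a sketch following the docstring's parameter example, assuming StoredProcedureParameter accepts value and type kwargs.

from azure.synapse.artifacts.models import AzureSqlSource, StoredProcedureParameter

source = AzureSqlSource(
    sql_reader_stored_procedure_name="usp_GetRows",  # mutually exclusive with sql_reader_query
    stored_procedure_parameters={
        "Parameter1": StoredProcedureParameter(value="1", type="Int"),
    },
    query_timeout="02:00:00",  # timespan pattern from the docstring
)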
- :type session_id: str - :param start_time: Start time of data flow debug session. - :type start_time: str - :param time_to_live_in_minutes: Compute type of the cluster. - :type time_to_live_in_minutes: int - :param last_activity_time: Last activity time of data flow debug session. - :type last_activity_time: str + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL database. Type: string (or Expression with + resultType string). 
+ :type table: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, - 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowDebugSessionInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) - self.node_count = kwargs.get('node_count', None) - self.integration_runtime_name = kwargs.get('integration_runtime_name', None) - self.session_id = kwargs.get('session_id', None) - self.start_time = kwargs.get('start_time', None) - self.time_to_live_in_minutes = kwargs.get('time_to_live_in_minutes', None) - self.last_activity_time = kwargs.get('last_activity_time', None) + super(AzureSqlTableDataset, self).__init__(**kwargs) + self.type = 'AzureSqlTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class DataFlowDebugStatisticsRequest(msrest.serialization.Model): - """Request body structure for data flow statistics. +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param columns: List of column names. - :type columns: list[str] + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. 
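AzureSqlTableDataset has the same shape as the DW and MI datasets above; a placeholder sketch preferring schema/table over the retiring table_name.

from azure.synapse.artifacts.models import AzureSqlTableDataset, LinkedServiceReference

sql_table = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="MyAzureSqlDatabaseLinkedService",
    ),
    schema_type_properties_schema="dbo",
    table="Customers",
)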
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str """ - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'columns': {'key': 'columns', 'type': '[str]'}, + _validation = { + 'type': {'required': True}, } - def __init__( - self, - **kwargs - ): - super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.stream_name = kwargs.get('stream_name', None) - self.columns = kwargs.get('columns', None) - - -class DataFlowFolder(msrest.serialization.Model): - """The folder that this data flow is in. If not specified, Data flow will appear at the root level. - - :param name: The name of the folder that this data flow is in. 
- :type name: str - """ - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataFlowFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + super(AzureStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureStorage' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DataFlowListResponse(msrest.serialization.Model): - """A list of data flow resources. +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. All required parameters must be populated in order to send to Azure. - :param value: Required. List of data flows. - :type value: list[~azure.synapse.artifacts.models.DataFlowResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: Required. The table name of the Azure Table storage. Type: string (or + Expression with resultType string). 
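Since connection_string and sas_uri on the AzureStorageLinkedService above are mutually exclusive, a sketch sets only one of them; the account name is a placeholder.

from azure.synapse.artifacts.models import AzureStorageLinkedService

storage_ls = AzureStorageLinkedService(
    # Either connection_string or sas_uri, never both.
    connection_string="DefaultEndpointsProtocol=https;AccountName=myaccount;",
)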
+ :type table_name: object """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DataFlowListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - + super(AzureTableDataset, self).__init__(**kwargs) + self.type = 'AzureTable' # type: str + self.table_name = kwargs['table_name'] -class DataFlowReference(msrest.serialization.Model): - """Data flow reference type. - Variables are only populated by the server, and will be ignored when sending a request. +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :ivar type: Required. Data flow reference type. Default value: "DataFlowReference". - :vartype type: str - :param reference_name: Required. Reference data flow name. - :type reference_name: str - :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: object - """ - + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: + string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or + Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + resultType string). 
+ :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + resultType string). + :type azure_table_insert_type: object + """ + _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - type = "DataFlowReference" - def __init__( self, **kwargs ): - super(DataFlowReference, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.reference_name = kwargs['reference_name'] - self.dataset_parameters = kwargs.get('dataset_parameters', None) + super(AzureTableSink, self).__init__(**kwargs) + self.type = 'AzureTableSink' # type: str + self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None) + self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None) + self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None) + self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None) -class SubResource(msrest.serialization.Model): - """Azure Synapse nested resource, which belongs to a workspace. +class AzureTableSource(TabularSource): + """A copy activity Azure Table source. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
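A sketch of the AzureTableSink above; every setting is an optional expression, shown here as plain literals, and the insert-type values are an assumption based on Table storage write modes.

from azure.synapse.artifacts.models import AzureTableSink

table_sink = AzureTableSink(
    azure_table_partition_key_name="PartitionKey",
    azure_table_row_key_name="RowKey",
    azure_table_default_partition_key_value="default",
    azure_table_insert_type="merge",  # assumed values: 'merge' or 'replace'
)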
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param azure_table_source_query: Azure Table source query. Type: string (or Expression with + resultType string). + :type azure_table_source_query: object + :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + Type: boolean (or Expression with resultType boolean). + :type azure_table_source_ignore_table_not_found: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, + 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - + super(AzureTableSource, self).__init__(**kwargs) + self.type = 'AzureTableSource' # type: str + self.azure_table_source_query = kwargs.get('azure_table_source_query', None) + self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None) -class DataFlowResource(SubResource): - """Data flow resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class AzureTableStorageLinkedService(LinkedService): + """The azure table storage linked service. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. 
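And the matching AzureTableSource, with a placeholder OData filter and tolerant missing-table behavior.

from azure.synapse.artifacts.models import AzureTableSource

table_source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'sensor1'",  # placeholder filter
    azure_table_source_ignore_table_not_found=True,
)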
+ :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataFlowResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(AzureTableStorageLinkedService, self).__init__(**kwargs) + self.type = 'AzureTableStorage' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class Transformation(msrest.serialization.Model): - """A data flow transformation. +class BigDataPoolReference(msrest.serialization.Model): + """Big data pool reference. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str + :param type: Required. Big data pool reference type. Possible values include: + "BigDataPoolReference". + :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType + :param reference_name: Required. Reference big data pool name. 
+ :type reference_name: str """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'reference_name': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Transformation, self).__init__(**kwargs) - self.name = kwargs['name'] - self.description = kwargs.get('description', None) + super(BigDataPoolReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] -class DataFlowSink(Transformation): - """Transformation for data flow sink. +class TrackedResource(Resource): + """The resource model definition for a ARM tracked top level resource. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str """ _validation = { - 'name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, } _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataFlowSink, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) + super(TrackedResource, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.location = kwargs['location'] -class DataFlowSource(Transformation): - """Transformation for data flow source. +class BigDataPoolResourceInfo(TrackedResource): + """A Big Data pool. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. 
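(BigDataPoolReference, completed above, takes its discriminator as a required constructor argument rather than a class-level constant — note the removed `type = "DataFlowReference"` line earlier in this diff, the pattern this regeneration drops. A minimal sketch, assuming the same models namespace:)

from azure.synapse.artifacts.models import BigDataPoolReference

ref = BigDataPoolReference(
    type="BigDataPoolReference",   # the only BigDataPoolReferenceType value
    reference_name="mysparkpool",  # placeholder pool name
)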
Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param provisioning_state: The state of the Big Data pool. + :type provisioning_state: str + :param auto_scale: Auto-scaling properties. + :type auto_scale: ~azure.synapse.artifacts.models.AutoScaleProperties + :param creation_date: The time when the Big Data pool was created. + :type creation_date: ~datetime.datetime + :param auto_pause: Auto-pausing properties. + :type auto_pause: ~azure.synapse.artifacts.models.AutoPauseProperties + :param is_compute_isolation_enabled: Whether compute isolation is required or not. + :type is_compute_isolation_enabled: bool + :param spark_events_folder: The Spark events folder. + :type spark_events_folder: str + :param node_count: The number of nodes in the Big Data pool. + :type node_count: int + :param library_requirements: Library version requirements. + :type library_requirements: ~azure.synapse.artifacts.models.LibraryRequirements + :param spark_version: The Apache Spark version. + :type spark_version: str + :param default_spark_log_folder: The default folder where Spark logs will be written. + :type default_spark_log_folder: str + :param node_size: The level of compute power that each node in the Big Data pool has. Possible + values include: "None", "Small", "Medium", "Large", "XLarge", "XXLarge". + :type node_size: str or ~azure.synapse.artifacts.models.NodeSize + :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values + include: "None", "MemoryOptimized". + :type node_size_family: str or ~azure.synapse.artifacts.models.NodeSizeFamily """ _validation = { - 'name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, } _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'auto_scale': {'key': 'properties.autoScale', 'type': 'AutoScaleProperties'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, + 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, + 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, + 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, + 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, + 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, + 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, + 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, + 'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataFlowSource, self).__init__(**kwargs) - self.dataset = kwargs.get('dataset', None) + super(BigDataPoolResourceInfo, self).__init__(**kwargs) + self.provisioning_state = kwargs.get('provisioning_state', None) + 
self.auto_scale = kwargs.get('auto_scale', None) + self.creation_date = kwargs.get('creation_date', None) + self.auto_pause = kwargs.get('auto_pause', None) + self.is_compute_isolation_enabled = kwargs.get('is_compute_isolation_enabled', None) + self.spark_events_folder = kwargs.get('spark_events_folder', None) + self.node_count = kwargs.get('node_count', None) + self.library_requirements = kwargs.get('library_requirements', None) + self.spark_version = kwargs.get('spark_version', None) + self.default_spark_log_folder = kwargs.get('default_spark_log_folder', None) + self.node_size = kwargs.get('node_size', None) + self.node_size_family = kwargs.get('node_size_family', None) -class DataFlowSourceSetting(msrest.serialization.Model): - """Definition of data flow source setting for debug. +class BigDataPoolResourceInfoListResult(msrest.serialization.Model): + """Collection of Big Data pool information. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param source_name: The data flow source name. - :type source_name: str - :param row_limit: Defines the row limit of data flow source in debug. - :type row_limit: int + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of Big Data pools. + :type value: list[~azure.synapse.artifacts.models.BigDataPoolResourceInfo] """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_name': {'key': 'sourceName', 'type': 'str'}, - 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[BigDataPoolResourceInfo]'}, } def __init__( self, **kwargs ): - super(DataFlowSourceSetting, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.source_name = kwargs.get('source_name', None) - self.row_limit = kwargs.get('row_limit', None) + super(BigDataPoolResourceInfoListResult, self).__init__(**kwargs) + self.next_link = kwargs.get('next_link', None) + self.value = kwargs.get('value', None) -class DataFlowStagingInfo(msrest.serialization.Model): - """Staging info for execute data flow activity. - - :param linked_service: Staging linked service reference. - :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for staging blob. - :type folder_path: str - """ - - _attribute_map = { - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'folderPath', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFlowStagingInfo, self).__init__(**kwargs) - self.linked_service = kwargs.get('linked_service', None) - self.folder_path = kwargs.get('folder_path', None) - - -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. +class BinaryDataset(Dataset): + """Binary dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. 
:type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. - Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. - Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should be selected to run - first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or - Expression with resultType integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression - with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, - Full and SingleBox. Type: string (or Expression with resultType string). - :type compilation_mode: object + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the Binary storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression: The data compression method used for the binary dataset. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__( self, **kwargs ): - super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) - self.type = 'DataLakeAnalyticsU-SQL' - self.script_path = kwargs['script_path'] - self.script_linked_service = kwargs['script_linked_service'] - self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) - self.priority = kwargs.get('priority', None) - self.parameters = kwargs.get('parameters', None) - self.runtime_version = kwargs.get('runtime_version', None) - self.compilation_mode = kwargs.get('compilation_mode', None) - + super(BinaryDataset, self).__init__(**kwargs) + self.type = 'Binary' # type: str + self.location = kwargs.get('location', None) + self.compression = kwargs.get('compression', None) -class DatasetCompression(msrest.serialization.Model): - """The compression method used on a dataset. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. +class BinarySink(CopySink): + """A copy activity Binary sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
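(A minimal sketch for the BinaryDataset model completed above; linked_service_name is its only required type property. LinkedServiceReference's constructor is not shown in this hunk, so passing the discriminator explicitly, mirroring BigDataPoolReference, is an assumption.)

from azure.synapse.artifacts.models import BinaryDataset, LinkedServiceReference

dataset = BinaryDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",        # assumed discriminator value
        reference_name="MyAzureBlobStorage",  # placeholder linked service
    ),
)
# location and compression are optional; serialize() nests them under
# typeProperties per the flattened 'typeProperties.*' _attribute_map keys.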
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings """ _validation = { @@ -5890,31 +6148,44 @@ class DatasetCompression(msrest.serialization.Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, **kwargs ): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetCompression' + super(BinarySink, self).__init__(**kwargs) + self.type = 'BinarySink' # type: str + self.store_settings = kwargs.get('store_settings', None) -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. +class BinarySource(CopySource): + """A copy activity Binary source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { @@ -5924,322 +6195,386 @@ class DatasetBZip2Compression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, **kwargs ): - super(DatasetBZip2Compression, self).__init__(**kwargs) - self.type = 'BZip2' - - -class DatasetDebugResource(SubResourceDebugResource): - """Dataset debug resource. - - All required parameters must be populated in order to send to Azure. - - :param name: The resource name. - :type name: str - :param properties: Required. Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset - """ - - _validation = { - 'properties': {'required': True}, - } + super(BinarySource, self).__init__(**kwargs) + self.type = 'BinarySource' # type: str + self.store_settings = kwargs.get('store_settings', None) - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, - } - def __init__( - self, - **kwargs - ): - super(DatasetDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] +class Trigger(msrest.serialization.Model): + """Azure Synapse nested object which contains information about creating pipeline run. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. 
+ :type annotations: list[object] """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, } - def __init__( - self, - **kwargs - ): - super(DatasetDeflateCompression, self).__init__(**kwargs) - self.type = 'Deflate' - self.level = kwargs.get('level', None) - - -class DatasetFolder(msrest.serialization.Model): - """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + _subtype_map = { + 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} } def __init__( self, **kwargs ): - super(DatasetFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) + super(Trigger, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'Trigger' # type: str + self.description = kwargs.get('description', None) + self.runtime_state = None + self.annotations = kwargs.get('annotations', None) -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to pipeline. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. 
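(The _subtype_map above is what drives polymorphic deserialization: msrest reads the `type` discriminator from the payload and resolves the concrete subclass, flattening nested maps such as MultiplePipelineTrigger's. A quick sketch with a placeholder payload:)

from azure.synapse.artifacts.models import Trigger

payload = {"type": "ScheduleTrigger", "description": "hourly run"}
trigger = Trigger.deserialize(payload)
print(type(trigger).__name__)  # ScheduleTrigger, not Trigger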
+ :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( self, **kwargs ): - super(DatasetGZipCompression, self).__init__(**kwargs) - self.type = 'GZip' - self.level = kwargs.get('level', None) - - -class DatasetListResponse(msrest.serialization.Model): - """A list of dataset resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of datasets. - :type value: list[~azure.synapse.artifacts.models.DatasetResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[DatasetResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DatasetListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) + super(MultiplePipelineTrigger, self).__init__(**kwargs) + self.type = 'MultiplePipelineTrigger' # type: str + self.pipelines = kwargs.get('pipelines', None) -class DatasetLocation(msrest.serialization.Model): - """Dataset location. +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + fire. 
For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :type ignore_empty_blobs: bool + :param events: Required. The type of events that cause this trigger to fire. + :type events: list[str or ~azure.synapse.artifacts.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'DatasetLocation' - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - + super(BlobEventsTrigger, self).__init__(**kwargs) + self.type = 'BlobEventsTrigger' # type: str + self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) + self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) + self.ignore_empty_blobs = kwargs.get('ignore_empty_blobs', None) + self.events = kwargs['events'] + self.scope = kwargs['scope'] -class DatasetReference(msrest.serialization.Model): - """Dataset reference type. - Variables are only populated by the server, and will be ignored when sending a request. +class BlobSink(CopySink): + """A copy activity Azure Blob sink. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Dataset reference type. Default value: "DatasetReference". - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. 
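(A minimal sketch for the BlobEventsTrigger completed above. events and scope are the required properties; "Microsoft.Storage.BlobCreated" is one of the BlobEventTypes values, and the ARM ID uses obvious placeholders.)

from azure.synapse.artifacts.models import BlobEventsTrigger

trigger = BlobEventsTrigger(
    events=["Microsoft.Storage.BlobCreated"],
    scope=(
        "/subscriptions/<subscription-id>/resourceGroups/<rg>"
        "/providers/Microsoft.Storage/storageAccounts/<account>"
    ),
    blob_path_begins_with="/records/blobs/december/",  # path example from the docstring
)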
Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression + with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - type = "DatasetReference" - def __init__( self, **kwargs ): - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) + super(BlobSink, self).__init__(**kwargs) + self.type = 'BlobSink' # type: str + self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None) + self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) + self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) + self.copy_behavior = kwargs.get('copy_behavior', None) -class DatasetResource(SubResource): - """Dataset resource type. - - Variables are only populated by the server, and will be ignored when sending a request. +class BlobSource(CopySource): + """A copy activity Azure Blob source. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
- :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DatasetResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(BlobSource, self).__init__(**kwargs) + self.type = 'BlobSource' # type: str + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. 
:type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } def __init__( self, **kwargs ): - super(DatasetZipDeflateCompression, self).__init__(**kwargs) - self.type = 'ZipDeflate' - self.level = kwargs.get('level', None) - + super(BlobTrigger, self).__init__(**kwargs) + self.type = 'BlobTrigger' # type: str + self.folder_path = kwargs['folder_path'] + self.max_concurrency = kwargs['max_concurrency'] + self.linked_service = kwargs['linked_service'] -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - Variables are only populated by the server, and will be ignored when sending a request. +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. All required parameters must be populated in order to send to Azure. @@ -6256,26 +6591,20 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :ivar authentication_type: AuthenticationType to be used for connection. Default value: - "Basic". - :vartype authentication_type: str + :param host: Required. Host name for connection. 
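(BlobTrigger, completed above, requires all three of folder_path, max_concurrency and linked_service. A minimal sketch; the LinkedServiceReference shape is the same assumption as in the BinaryDataset sketch earlier.)

from azure.synapse.artifacts.models import BlobTrigger, LinkedServiceReference

trigger = BlobTrigger(
    folder_path="mycontainer/triggers",   # placeholder container/folder
    max_concurrency=10,
    linked_service=LinkedServiceReference(
        type="LinkedServiceReference",    # assumed discriminator value
        reference_name="MyAzureStorage",  # placeholder
    ),
)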
Type: string (or Expression with resultType + string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression with resultType + integer). + :type port: object :param username: Username for authentication. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.synapse.artifacts.models.SecretBase - :param package_collection: Under where packages are created when querying database. Type: - string (or Expression with resultType string). - :type package_collection: object - :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or - Expression with resultType string). - :type certificate_common_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6284,9 +6613,7 @@ class Db2LinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - 'authentication_type': {'constant': True}, + 'host': {'required': True}, } _attribute_map = { @@ -6296,68 +6623,121 @@ class Db2LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, - 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - authentication_type = "Basic" - def __init__( self, **kwargs ): - super(Db2LinkedService, self).__init__(**kwargs) - self.type = 'Db2' - self.server = kwargs['server'] - self.database = kwargs['database'] + super(CassandraLinkedService, self).__init__(**kwargs) + self.type = 'Cassandra' # type: str + self.host = kwargs['host'] + self.authentication_type = kwargs.get('authentication_type', None) + self.port = kwargs.get('port', None) self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) - self.package_collection = kwargs.get('package_collection', None) - self.certificate_common_name = kwargs.get('certificate_common_name', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class Db2TableDataset(Dataset): - """The Db2 table dataset. +class CassandraSource(TabularSource): + """A copy activity source for a Cassandra database. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language + (CQL) command. Type: string (or Expression with resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many Cassandra servers must + respond to a read request before returning data to the client application. Cassandra checks the + specified number of Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", + "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". 
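(A minimal sketch for the CassandraLinkedService completed above; only host is required. Using SecureString as the SecretBase subclass for the password is an assumption — that class is not shown in this hunk.)

from azure.synapse.artifacts.models import CassandraLinkedService, SecureString

linked_service = CassandraLinkedService(
    host="cassandra.contoso.internal",        # placeholder host
    port=9042,                                # conventional CQL port
    username="svc-reader",                    # placeholder
    password=SecureString(value="<secret>"),  # assumed SecretBase subclass
)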
+ :type consistency_level: str or + ~azure.synapse.artifacts.models.CassandraSourceReadConsistencyLevels + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CassandraSource, self).__init__(**kwargs) + self.type = 'CassandraSource' # type: str + self.query = kwargs.get('query', None) + self.consistency_level = kwargs.get('consistency_level', None) + + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name of the Cassandra database. Type: string (or Expression with + resultType string). :type table_name: object - :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The Db2 table name. Type: string (or Expression with resultType string). 
- :type table: object + :type keyspace: object """ _validation = { @@ -6376,120 +6756,118 @@ class Db2TableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } def __init__( self, **kwargs ): - super(Db2TableDataset, self).__init__(**kwargs) - self.type = 'Db2Table' + super(CassandraTableDataset, self).__init__(**kwargs) + self.type = 'CassandraTable' # type: str self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + self.keyspace = kwargs.get('keyspace', None) -class DeleteActivity(ExecutionActivity): - """Delete activity. +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Trigger description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path will be deleted - recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param max_concurrent_connections: The max concurrent connections to connect data source at the - same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity execution. Default - value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: object - :param log_storage_settings: Log storage settings customer need to provide when enableLogging - is true. - :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. 
Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[~azure.synapse.artifacts.models.PipelineReference] + :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. + :type run_dimension: str """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DeleteActivity, self).__init__(**kwargs) - self.type = 'Delete' - self.recursive = kwargs.get('recursive', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.enable_logging = kwargs.get('enable_logging', None) - self.log_storage_settings = kwargs.get('log_storage_settings', None) - self.dataset = kwargs['dataset'] + super(ChainingTrigger, self).__init__(**kwargs) + self.type = 'ChainingTrigger' # type: str + self.pipeline = kwargs['pipeline'] + self.depends_on = kwargs['depends_on'] + self.run_dimension = kwargs['run_dimension'] -class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for deleting data flow debug session. +class CloudError(msrest.serialization.Model): + """The object that defines the structure of an Azure Synapse error response. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str + All required parameters must be populated in order to send to Azure. + + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. 
+ :type message: str + :param target: Property name/path in request associated with error. + :type target: str + :param details: Array with additional error details. + :type details: list[~azure.synapse.artifacts.models.CloudError] """ + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudError]'}, } def __init__( self, **kwargs ): - super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) + super(CloudError, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) -class DelimitedTextDataset(Dataset): - """Delimited text dataset. +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. All required parameters must be populated in order to send to Azure. @@ -6515,35 +6893,9 @@ class DelimitedTextDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the delimited text storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object - :param encoding_name: The code page name of the preferred encoding. If miss, the default value - is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in - the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", - "snappy", "lz4". - :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object - :param first_row_as_header: When used as input, treat the first row of data as headers. When - used as output,write the headers into the output as the first row of data. The default value is - false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object - :param null_value: The null value string. Type: string (or Expression with resultType string). 
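Reviewer note: CloudError's attribute map uses dotted keys ('error.code', 'error.message'), so the model (de)serializes under an "error" envelope, and details nests recursively. A minimal sketch with hypothetical values; code and message are the only required fields.

from azure.synapse.artifacts import models

err = models.CloudError(
    code="BadRequest",
    message="The request payload is invalid.",
    details=[models.CloudError(code="InvalidReference", message="Unknown pipeline.")],
)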
- :type null_value: object + :type entity_name: object """ _validation = { @@ -6561,98 +6913,251 @@ class DelimitedTextDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DelimitedTextDataset, self).__init__(**kwargs) - self.type = 'DelimitedText' - self.location = kwargs.get('location', None) - self.column_delimiter = kwargs.get('column_delimiter', None) - self.row_delimiter = kwargs.get('row_delimiter', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression_codec = kwargs.get('compression_codec', None) - self.compression_level = kwargs.get('compression_level', None) - self.quote_char = kwargs.get('quote_char', None) - self.escape_char = kwargs.get('escape_char', None) - self.first_row_as_header = kwargs.get('first_row_as_header', None) - self.null_value = kwargs.get('null_value', None) + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.type = 'CommonDataServiceForAppsEntity' # type: str + self.entity_name = kwargs.get('entity_name', None) -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. 
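Reviewer note: the Common Data Service for Apps entity dataset keeps a single entityName under typeProperties. A minimal sketch; the linked-service name and entity are hypothetical, and linked_service_name is assumed required as on the other Dataset subclasses.

from azure.synapse.artifacts import models

dataset = models.CommonDataServiceForAppsEntityDataset(
    linked_service_name=models.LinkedServiceReference(reference_name="CdsLinkedService"),
    entity_name="account",  # logical entity name
)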
:type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. Document Database collection name. Type: string (or - Expression with resultType string). - :type collection_name: object + :param deployment_type: Required. The deployment type of the Common Data Service for Apps + instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common + Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType + string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for Apps server. The + property is required for on-prem and not allowed for online. Type: string (or Expression with + resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. The property is + required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression + with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property + is required for on-line and not allowed for on-prem. Type: string (or Expression with + resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service for Apps instance. + The property is required for on-prem and required for online when there are more than one + Common Data Service for Apps instances associated with the user. Type: string (or Expression + with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect to Common Data Service + for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). Possible values include: "Office365", "Ifd", + "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Common Data Service for Apps instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". 
+ :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DocumentDbCollectionDataset, self).__init__(**kwargs) - self.type = 'DocumentDbCollection' - self.collection_name = kwargs['collection_name'] + super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) + self.type = 'CommonDataServiceForApps' # type: str + self.deployment_type = kwargs['deployment_type'] + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.service_principal_id = 
kwargs.get('service_principal_id', None) + self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) + self.service_principal_credential = kwargs.get('service_principal_credential', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DrillLinkedService(LinkedService): - """Drill server linked service. +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). 
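Reviewer note: for the linked service above, only deploymentType and authenticationType are enforced by _validation; the remaining properties depend on the scenario. A minimal online-scenario sketch with hypothetical values, assuming SecureString is available as a SecretBase implementation in these models.

from azure.synapse.artifacts import models

linked_service = models.CommonDataServiceForAppsLinkedService(
    deployment_type="Online",         # or "OnPremisesWithIfd"
    authentication_type="Office365",  # or "Ifd" / "AADServicePrincipal"
    service_uri="https://contoso.crm.dynamics.com",  # required for the online scenario
    username="alice@contoso.com",
    password=models.SecureString(value="<placeholder>"),
)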
+ :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CommonDataServiceForAppsSink, self).__init__(**kwargs) + self.type = 'CommonDataServiceForAppsSink' # type: str + self.write_behavior = kwargs['write_behavior'] + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.alternate_key_name = kwargs.get('alternate_key_name', None) + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data + Service for Apps (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.type = 'CommonDataServiceForAppsSource' # type: str + self.query = kwargs.get('query', None) + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. All required parameters must be populated in order to send to Azure. @@ -6669,11 +7174,23 @@ class DrillLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. 
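Reviewer note: a sketch pairing the two Common Data Service for Apps copy models defined above. "Upsert" is the only documented writeBehavior value, and the FetchXML string is a hypothetical placeholder.

from azure.synapse.artifacts import models

sink = models.CommonDataServiceForAppsSink(
    write_behavior="Upsert",             # required
    ignore_null_values=True,
    alternate_key_name="accountnumber",  # hypothetical alternate key
)
source = models.CommonDataServiceForAppsSource(
    query="<fetch><entity name='account'/></fetch>",
)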
Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param client_id: Required. Application client_id supplied by Concur App Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur Service. + :type username: object + :param password: The password corresponding to the user name that you provided in the username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6682,6 +7199,8 @@ class DrillLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, } _attribute_map = { @@ -6691,8 +7210,12 @@ class DrillLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -6700,15 +7223,19 @@ def __init__( self, **kwargs ): - super(DrillLinkedService, self).__init__(**kwargs) - self.type = 'Drill' - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) + super(ConcurLinkedService, self).__init__(**kwargs) + self.type = 'Concur' # type: str + self.client_id = kwargs['client_id'] + self.username = kwargs['username'] + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DrillTableDataset(Dataset): - """Drill server dataset. +class ConcurObjectDataset(Dataset): + """Concur Service dataset. 
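Reviewer note: unlike the Drill linked service this hunk replaces, clientId and username are required here. A minimal sketch with hypothetical credentials; SecureString is assumed available as a SecretBase implementation.

from azure.synapse.artifacts import models

concur = models.ConcurLinkedService(
    client_id="<concur-app-client-id>",  # required, supplied by Concur App Management
    username="alice",                    # required
    password=models.SecureString(value="<placeholder>"),
)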
All required parameters must be populated in order to send to Azure. @@ -6734,14 +7261,8 @@ class DrillTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -6760,221 +7281,257 @@ class DrillTableDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DrillTableDataset, self).__init__(**kwargs) - self.type = 'DrillTable' + super(ConcurObjectDataset, self).__init__(**kwargs) + self.type = 'ConcurObject' # type: str self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. +class ConcurSource(TabularSource): + """A copy activity Concur Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData - endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. Mark this field as a - SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key - Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which - your application resides. Retrieve it by hovering the mouse in the top-right corner of the - Azure portal. Type: string (or Expression with resultType string). 
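Reviewer note: the Concur dataset drops the table/schema split and keeps a single tableName. A minimal sketch with hypothetical names, assuming LinkedServiceReference takes a reference_name keyword.

from azure.synapse.artifacts import models

dataset = models.ConcurObjectDataset(
    linked_service_name=models.LinkedServiceReference(reference_name="ConcurLinkedService"),
    table_name="Trips",
)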
- :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: - string (or Expression with resultType string). - :type aad_resource_id: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DynamicsAXLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsAX' - self.url = kwargs['url'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.tenant = kwargs['tenant'] - self.aad_resource_id = kwargs['aad_resource_id'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(ConcurSource, self).__init__(**kwargs) + self.type = 'ConcurSource' # type: str + self.query = kwargs.get('query', None) -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach , Until. 
All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression - with resultType string). - :type path: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, } def __init__( self, **kwargs ): - super(DynamicsAXResourceDataset, self).__init__(**kwargs) - self.type = 'DynamicsAXResource' - self.path = kwargs['path'] + super(ControlActivity, self).__init__(**kwargs) + self.type = 'Container' # type: str -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. +class CopyActivity(ExecutionActivity): + """Copy activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. 
+ :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param outputs: List of outputs for the activity. + :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param source: Required. Copy activity source. + :type source: ~azure.synapse.artifacts.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.synapse.artifacts.models.CopySink + :param translator: Copy activity translator. If not specified, tabular translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim staging. Default value is + false. Type: boolean (or Expression with resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when EnableStaging is true. + :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + avoid overloading the data store. Type: integer (or Expression with resultType integer), + minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units that can be used to + perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. + Type: boolean (or Expression with resultType boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row settings when + EnableSkipIncompatibleRow is true. 
+ :type redirect_incompatible_row_settings: + ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, } def __init__( self, **kwargs ): - super(DynamicsCrmEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsCrmEntity' - self.entity_name = kwargs.get('entity_name', None) + super(CopyActivity, self).__init__(**kwargs) + self.type = 'Copy' # type: str + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.source = kwargs['source'] + self.sink = kwargs['sink'] + self.translator = kwargs.get('translator', None) + self.enable_staging = kwargs.get('enable_staging', None) + self.staging_settings = kwargs.get('staging_settings', None) + self.parallel_copies = kwargs.get('parallel_copies', None) + self.data_integration_units = kwargs.get('data_integration_units', None) + self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) + self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. 
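Reviewer note: CopyActivity above reads source and sink from kwargs with [] access, so both are hard requirements alongside name. A minimal end-to-end sketch reusing models from this file; all reference names are hypothetical, and DatasetReference is assumed to take a reference_name keyword.

from azure.synapse.artifacts import models

copy_activity = models.CopyActivity(
    name="CopyAccounts",  # required
    inputs=[models.DatasetReference(reference_name="SourceDataset")],
    outputs=[models.DatasetReference(reference_name="SinkDataset")],
    source=models.CommonDataServiceForAppsSource(),                     # required
    sink=models.CommonDataServiceForAppsSink(write_behavior="Upsert"),  # required
    enable_staging=False,
)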
+class CopyTranslator(msrest.serialization.Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy translator type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__( + self, + **kwargs + ): + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'CopyTranslator' # type: str + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. @@ -6991,52 +7548,17 @@ class DynamicsCrmLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Type: string (or Expression with resultType - string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem - and not allowed for online. Default is 443. Type: integer (or Expression with resultType - integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for - on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM instance. The property is - required for on-prem and required for online when there are more than one Dynamics CRM - instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics CRM instance. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password to access the Dynamics CRM instance. 
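Reviewer note: CopyTranslator is a polymorphic base; the _subtype_map lets the deserializer dispatch on the 'type' discriminator, with TabularTranslator as the only registered subclass. Callers construct the subclass rather than the base; its properties are assumed to all be optional in the generated model.

from azure.synapse.artifacts import models

# Discriminator 'type' is set to 'TabularTranslator' by the constructor.
translator = models.TabularTranslator()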
- :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or + Expression with resultType string). + :type account_endpoint: object + :param database: The name of the database. Type: string (or Expression with resultType string). + :type database: object + :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or + AzureKeyVaultSecretReference. + :type account_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -7045,8 +7567,6 @@ class DynamicsCrmLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { @@ -7056,17 +7576,10 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -7074,24 +7587,17 @@ def __init__( self, **kwargs ): - super(DynamicsCrmLinkedService, self).__init__(**kwargs) - self.type = 'DynamicsCrm' - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) + super(CosmosDbLinkedService, self).__init__(**kwargs) + self.type = 'CosmosDb' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.account_endpoint = kwargs.get('account_endpoint', None) + self.database = kwargs.get('database', None) + self.account_key = kwargs.get('account_key', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. All required parameters must be populated in order to send to Azure. @@ -7117,14 +7623,15 @@ class DynamicsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. 
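Reviewer note: this hunk relaxes the old required Dynamics properties; every Cosmos DB property is now optional, but a usable definition needs either connectionString or the accountEndpoint/database/accountKey trio. A sketch with hypothetical values, assuming SecureString as the SecretBase implementation.

from azure.synapse.artifacts import models

cosmos = models.CosmosDbLinkedService(
    account_endpoint="https://myaccount.documents.azure.com:443/",
    database="mydb",
    account_key=models.SecureString(value="<placeholder>"),
)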
Type: string (or Expression with resultType - string). - :type entity_name: object + :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: + string (or Expression with resultType string). + :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -7137,20 +7644,20 @@ class DynamicsEntityDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DynamicsEntityDataset, self).__init__(**kwargs) - self.type = 'DynamicsEntity' - self.entity_name = kwargs.get('entity_name', None) + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApiCollection' # type: str + self.collection = kwargs['collection'] -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. @@ -7167,59 +7674,19 @@ class DynamicsLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for - Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics server. The property is required - for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: str - :param port: The port of on-premises Dynamics server. The property is required for on-prem and - not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), - minimum: 0. - :type port: str - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- - line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: str - :param organization_name: The organization name of the Dynamics instance. The property is - required for on-prem and required for online when there are more than one Dynamics instances - associated with the user. Type: string (or Expression with resultType string). - :type organization_name: str - :param authentication_type: Required. The authentication type to connect to Dynamics server. - 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics instance. 
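Reviewer note: unlike the Dynamics entity dataset it replaces, the Mongo API collection dataset makes collection required (note the [] access in __init__ above). A sketch with hypothetical names, assuming LinkedServiceReference takes a reference_name keyword.

from azure.synapse.artifacts import models

dataset = models.CosmosDbMongoDbApiCollectionDataset(
    linked_service_name=models.LinkedServiceReference(reference_name="CosmosMongoLinkedService"),
    collection="orders",  # required
)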
Type: string (or Expression with - resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to + access. Type: string (or Expression with resultType string). 
+ :type database: object """ _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, } _attribute_map = { @@ -7229,120 +7696,140 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, - 'port': {'key': 'typeProperties.port', 'type': 'str'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, **kwargs ): - super(DynamicsLinkedService, self).__init__(**kwargs) - self.type = 'Dynamics' - self.deployment_type = kwargs['deployment_type'] - self.host_name = kwargs.get('host_name', None) - self.port = kwargs.get('port', None) - self.service_uri = kwargs.get('service_uri', None) - self.organization_name = kwargs.get('organization_name', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) - self.service_principal_credential = kwargs.get('service_principal_credential', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApi' # type: str + self.connection_string = kwargs['connection_string'] + self.database = kwargs['database'] -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua account in the form: - sitename/username. (i.e. Eloqua/Alice). - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Specifies whether a document with the same key should be overwritten + (upsert) rather than raise an exception (insert). The default value is "insert". Type: string + (or Expression with resultType string).
+ :type write_behavior: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(EloquaLinkedService, self).__init__(**kwargs) - self.type = 'Eloqua' - self.endpoint = kwargs['endpoint'] - self.username = kwargs['username'] - self.password = kwargs.get('password', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApiSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). 
+ :type filter: object + :param cursor_methods: Cursor methods for MongoDB query. + :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the + application. This property's main purpose is to avoid hitting the limitation of response size. + Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) + self.type = 'CosmosDbMongoDbApiSource' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) + + +class CosmosDbSqlApiCollectionDataset(Dataset): + """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. @@ -7368,13 +7855,15 @@ class EloquaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + Expression with resultType string). + :type collection_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, } _attribute_map = { @@ -7387,566 +7876,535 @@ class EloquaObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(EloquaObjectDataset, self).__init__(**kwargs) - self.type = 'EloquaObject' - self.table_name = kwargs.get('table_name', None) - - -class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request.
- :type row_limits: int - :param expression: The expression for preview. - :type expression: str - """ - - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, - 'expression': {'key': 'expression', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) - self.session_id = kwargs.get('session_id', None) - self.data_flow_name = kwargs.get('data_flow_name', None) - self.stream_name = kwargs.get('stream_name', None) - self.row_limits = kwargs.get('row_limits', None) - self.expression = kwargs.get('expression', None) + super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiCollection' # type: str + self.collection_name = kwargs['collection_name'] -class ExecuteDataFlowActivity(ExecutionActivity): - """Execute data flow activity. +class CosmosDbSqlApiSink(CopySink): + """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. - :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'data_flow': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecuteDataFlowActivity, self).__init__(**kwargs) - self.type = 'ExecuteDataFlow' - self.data_flow = kwargs['data_flow'] - self.staging = kwargs.get('staging', None) - self.integration_runtime = kwargs.get('integration_runtime', None) - self.compute = kwargs.get('compute', None) + super(CosmosDbSqlApiSink, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) -class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): - """Compute properties for data flow activity. +class CosmosDbSqlApiSource(CopySource): + """A copy activity Azure CosmosDB (SQL API) Collection source. - :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType - :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: SQL API query. Type: string (or Expression with resultType string). + :type query: object + :param page_size: Page size of the result. Type: integer (or Expression with resultType + integer). + :type page_size: object + :param preferred_regions: Preferred regions. Type: array of strings (or Expression with + resultType array of strings). + :type preferred_regions: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'page_size': {'key': 'pageSize', 'type': 'object'}, + 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) - self.compute_type = kwargs.get('compute_type', None) - self.core_count = kwargs.get('core_count', None) + super(CosmosDbSqlApiSource, self).__init__(**kwargs) + self.type = 'CosmosDbSqlApiSource' # type: str + self.query = kwargs.get('query', None) + self.page_size = kwargs.get('page_size', None) + self.preferred_regions = kwargs.get('preferred_regions', None) -class ExecutePipelineActivity(Activity): - """Execute pipeline activity. +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.synapse.artifacts.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait for the dependent - pipeline execution to finish. Default is false. - :type wait_on_completion: bool + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. 
+ :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in connection string. + :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'pipeline': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecutePipelineActivity, self).__init__(**kwargs) - self.type = 'ExecutePipeline' - self.pipeline = kwargs['pipeline'] - self.parameters = kwargs.get('parameters', None) - self.wait_on_completion = kwargs.get('wait_on_completion', None) + super(CouchbaseLinkedService, self).__init__(**kwargs) + self.type = 'Couchbase' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.cred_string = kwargs.get('cred_string', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. +class CouchbaseSource(TabularSource): + """A copy activity Couchbase server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CouchbaseSource, self).__init__(**kwargs) + self.type = 'CouchbaseSource' # type: str + self.query = kwargs.get('query', None) + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or - "x64". Type: string (or Expression with resultType string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: string (or Expression - with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. Type: string (or - Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the SSIS package. 
- :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers to execute the SSIS - package. - :type project_connection_managers: dict[str, object] - :param package_connection_managers: The package level connection managers to execute the SSIS - package. - :type package_connection_managers: dict[str, object] - :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 
'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ExecuteSSISPackageActivity, self).__init__(**kwargs) - self.type = 'ExecuteSSISPackage' - self.package_location = kwargs['package_location'] - self.runtime = kwargs.get('runtime', None) - self.logging_level = kwargs.get('logging_level', None) - self.environment_path = kwargs.get('environment_path', None) - self.execution_credential = kwargs.get('execution_credential', None) - self.connect_via = kwargs['connect_via'] - self.project_parameters = kwargs.get('project_parameters', None) - self.package_parameters = kwargs.get('package_parameters', None) - self.project_connection_managers = kwargs.get('project_connection_managers', None) - self.package_connection_managers = kwargs.get('package_connection_managers', None) - self.property_overrides = kwargs.get('property_overrides', None) - self.log_location = kwargs.get('log_location', None) + super(CouchbaseTableDataset, self).__init__(**kwargs) + self.type = 'CouchbaseTable' # type: str + self.table_name = kwargs.get('table_name', None) -class ExposureControlRequest(msrest.serialization.Model): - """The exposure control request. +class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): + """Request body structure for creating data flow debug session. - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param existing_cluster_id: The ID of existing Databricks cluster. + :type existing_cluster_id: str + :param cluster_timeout: Timeout setting for Databricks cluster. + :type cluster_timeout: int + :param new_cluster_name: The name of new Databricks cluster. + :type new_cluster_name: str + :param new_cluster_node_type: The type of new Databricks cluster. + :type new_cluster_node_type: str + :param data_bricks_linked_service: Data bricks linked service. + :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource """ _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, + 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, + 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, + 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, + 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, } def __init__( self, **kwargs ): - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = kwargs.get('feature_name', None) - self.feature_type = kwargs.get('feature_type', None) - + super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.cluster_timeout = kwargs.get('cluster_timeout', None) + self.new_cluster_name = kwargs.get('new_cluster_name', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.data_bricks_linked_service = kwargs.get('data_bricks_linked_service', None) -class ExposureControlResponse(msrest.serialization.Model): - """The exposure control response. - Variables are only populated by the server, and will be ignored when sending a request. 
+class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): + """Response body structure for creating data flow debug session. - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str + :param session_id: The ID of data flow debug session. + :type session_id: str """ - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, } def __init__( self, **kwargs ): - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None - + super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) -class Expression(msrest.serialization.Model): - """Azure Synapse expression definition. - Variables are only populated by the server, and will be ignored when sending a request. +class CreateRunResponse(msrest.serialization.Model): + """Response body with a run identifier. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Expression type. Default value: "Expression". - :vartype type: str - :param value: Required. Expression value. - :type value: str + :param run_id: Required. Identifier of a run. + :type run_id: str """ _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, + 'run_id': {'required': True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'run_id': {'key': 'runId', 'type': 'str'}, } - type = "Expression" - def __init__( self, **kwargs ): - super(Expression, self).__init__(**kwargs) - self.value = kwargs['value'] + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = kwargs['run_id'] -class FileServerLinkedService(LinkedService): - """File system linked service. +class CustomActivity(ExecutionActivity): + """Custom activity type. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. 
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param command: Required. Command for custom activity. Type: string (or Expression with + resultType string). + :type command: object + :param resource_linked_service: Resource linked service reference. + :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for resource files. Type: string (or Expression with resultType string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :type folder_path: object + :param reference_objects: Reference objects. + :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject + :param extended_properties: User-defined property bag. There is no restriction on the keys or + values that can be used. The user-specified custom activity has the full responsibility to + consume and interpret the content defined. + :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted for custom activity. + Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'host': {'required': True}, + 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } def __init__( self, **kwargs ): - super(FileServerLinkedService, self).__init__(**kwargs) - self.type = 'FileServer' - self.host =
kwargs['host'] - self.user_id = kwargs.get('user_id', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - + super(CustomActivity, self).__init__(**kwargs) + self.type = 'Custom' # type: str + self.command = kwargs['command'] + self.resource_linked_service = kwargs.get('resource_linked_service', None) + self.folder_path = kwargs.get('folder_path', None) + self.reference_objects = kwargs.get('reference_objects', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.retention_time_in_days = kwargs.get('retention_time_in_days', None) -class FilterActivity(Activity): - """Filter and return results from input array based on the conditions. - All required parameters must be populated in order to send to Azure. +class CustomActivityReferenceObject(msrest.serialization.Model): + """Reference objects for custom activity. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.synapse.artifacts.models.Expression - :param condition: Required. Condition to be used for filtering the input. - :type condition: ~azure.synapse.artifacts.models.Expression + :param linked_services: Linked service references. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param datasets: Dataset references. + :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] """ - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, } def __init__( self, **kwargs ): - super(FilterActivity, self).__init__(**kwargs) - self.type = 'Filter' - self.items = kwargs['items'] - self.condition = kwargs['condition'] + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = kwargs.get('linked_services', None) + self.datasets = kwargs.get('datasets', None) -class ForEachActivity(Activity): - """This activity is used for iterating over a collection and execute given activities. +class CustomDataset(Dataset): + """The custom dataset. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param is_sequential: Should the loop be executed in sequence or in parallel (max 50). - :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of parallel execution - (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.synapse.artifacts.models.Expression - :param activities: Required. List of activities to execute . - :type activities: list[~azure.synapse.artifacts.models.Activity] + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param type_properties: Custom dataset properties. 
+ :type type_properties: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ForEachActivity, self).__init__(**kwargs) - self.type = 'ForEach' - self.is_sequential = kwargs.get('is_sequential', None) - self.batch_count = kwargs.get('batch_count', None) - self.items = kwargs['items'] - self.activities = kwargs['activities'] + super(CustomDataset, self).__init__(**kwargs) + self.type = 'CustomDataset' # type: str + self.type_properties = kwargs.get('type_properties', None) -class FtpServerLinkedService(LinkedService): - """A FTP server Linked Service. +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. All required parameters must be populated in order to send to Azure. @@ -7963,36 +8421,13 @@ class FtpServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the FTP server uses to listen for client connections. - Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to logon the FTP server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is - true. Type: boolean (or Expression with resultType boolean). 
- :type enable_ssl: object - :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate - when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_server_certificate_validation: object + :param type_properties: Required. Custom linked service properties. + :type type_properties: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'type_properties': {'required': True}, } _attribute_map = { @@ -8002,34 +8437,52 @@ class FtpServerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__( self, **kwargs ): - super(FtpServerLinkedService, self).__init__(**kwargs) - self.type = 'FtpServer' - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + super(CustomDataSourceLinkedService, self).__init__(**kwargs) + self.type = 'CustomDataSource' # type: str + self.type_properties = kwargs['type_properties'] -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: . + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {} + } + + def __init__( + self, + **kwargs + ): + super(CustomSetupBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. All required parameters must be populated in order to send to Azure. @@ -8050,16 +8503,21 @@ class GetMetadataActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. 
- :type dataset: ~azure.synapse.artifacts.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. - :type field_list: list[object] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'dataset': {'required': True}, + :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks + Workspace. This path must begin with a slash. Type: string (or Expression with resultType + string). + :type notebook_path: object + :param base_parameters: Base parameters to be used for each run of this job. If the notebook + takes a parameter that is not specified, the default value from the notebook will be used. + :type base_parameters: dict[str, object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'notebook_path': {'required': True}, } _attribute_map = { @@ -8071,1507 +8529,10890 @@ class GetMetadataActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, + 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, **kwargs ): - super(GetMetadataActivity, self).__init__(**kwargs) - self.type = 'GetMetadata' - self.dataset = kwargs['dataset'] - self.field_list = kwargs.get('field_list', None) + super(DatabricksNotebookActivity, self).__init__(**kwargs) + self.type = 'DatabricksNotebook' # type: str + self.notebook_path = kwargs['notebook_path'] + self.base_parameters = kwargs.get('base_parameters', None) + self.libraries = kwargs.get('libraries', None) -class GetSsisObjectMetadataRequest(msrest.serialization.Model): - """The request payload of get SSIS object metadata. +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. - :param metadata_path: Metadata path. - :type metadata_path: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing the main method to be + executed. This class must be contained in a JAR provided as a library.
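
A minimal usage sketch of the `DatabricksNotebookActivity` model added above (illustrative only, not part of the diff). It assumes the models import from `azure.synapse.artifacts.models`, as the docstring cross-references indicate; the activity and linked service names are placeholders.

```python
from azure.synapse.artifacts.models import (
    DatabricksNotebookActivity,
    LinkedServiceReference,
)

# Kwargs mirror the _attribute_map: notebook_path is required, the rest optional.
activity = DatabricksNotebookActivity(
    name="RunNotebook",                           # required Activity name
    notebook_path="/Shared/my-notebook",          # required; must begin with a slash
    base_parameters={"inputDate": "2020-09-15"},  # per-run notebook parameters
    libraries=[{"pypi": {"package": "simplejson"}}],
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",            # discriminator expected by the model
        reference_name="MyDatabricksLinkedService",
    ),
)
assert activity.type == "DatabricksNotebook"      # constant filled in by __init__
```
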
Type: string (or + Expression with resultType string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, **kwargs ): - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = kwargs.get('metadata_path', None) + super(DatabricksSparkJarActivity, self).__init__(**kwargs) + self.type = 'DatabricksSparkJar' # type: str + self.main_class_name = kwargs['main_class_name'] + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) -class GoogleAdWordsLinkedService(LinkedService): - """Google AdWords service linked service. +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_customer_id: Required. The Client customer ID of the AdWords account that you - want to fetch report data for. - :type client_customer_id: object - :param developer_token: Required. The developer token associated with the manager account that - you use to grant access to the AdWords API. - :type developer_token: ~azure.synapse.artifacts.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". 
- :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. DBFS paths are + supported. Type: string (or Expression with resultType string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. 
+ :type libraries: list[dict[str, object]] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'client_customer_id': {'required': True}, - 'developer_token': {'required': True}, - 'authentication_type': {'required': True}, + 'python_file': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, **kwargs ): - super(GoogleAdWordsLinkedService, self).__init__(**kwargs) - self.type = 'GoogleAdWords' - self.client_customer_id = kwargs['client_customer_id'] - self.developer_token = kwargs['developer_token'] - self.authentication_type = kwargs['authentication_type'] - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(DatabricksSparkPythonActivity, self).__init__(**kwargs) + self.type = 'DatabricksSparkPython' # type: str + self.python_file = kwargs['python_file'] + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. +class DataFlow(msrest.serialization.Model): + """Azure Synapse nested object which contains a flow with data movements and transformations. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MappingDataFlow. All required parameters must be populated in order to send to Azure. 
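
The Spark JAR and Spark Python activities defined above follow the same kwargs pattern; a quick sketch for the Python variant (paths and names are placeholders):

```python
from azure.synapse.artifacts.models import DatabricksSparkPythonActivity

py_activity = DatabricksSparkPythonActivity(
    name="ScoreModel",
    python_file="dbfs:/mnt/jobs/score.py",  # required; DBFS paths are supported
    parameters=["--date", "2020-09-15"],    # forwarded to the script's command line
    libraries=[{"pypi": {"package": "numpy"}}],
)
```
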
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of data flow.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: The description of the data flow. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the data flow. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. + :type folder: ~azure.synapse.artifacts.models.DataFlowFolder """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + } + + _subtype_map = { + 'type': {'MappingDataFlow': 'MappingDataFlow'} } def __init__( self, **kwargs ): - super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleAdWordsObject' - self.table_name = kwargs.get('table_name', None) + super(DataFlow, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.description = kwargs.get('description', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. +class DataFlowDebugCommandRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
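
The `_subtype_map` added above drives polymorphic deserialization: msrest selects the concrete subclass from the serialized `type` field. A small sketch of that behaviour, using the standard msrest `Model.deserialize` helper:

```python
from azure.synapse.artifacts import models

# "type" acts as the discriminator declared in DataFlow._subtype_map.
flow = models.DataFlow.deserialize({"type": "MappingDataFlow", "description": "demo"})
print(type(flow).__name__)  # -> MappingDataFlow, not DataFlow
```
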
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery projects to access. - :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google - Drive access enables support for federated tables that combine BigQuery data with data from - Google Drive. The default value is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param session_id: Required. The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param command_name: The command name. + :type command_name: str + :param command_payload: Required. The command payload object. 
+ :type command_payload: object """ _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, + 'session_id': {'required': True}, + 'command_payload': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'command_name': {'key': 'commandName', 'type': 'str'}, + 'command_payload': {'key': 'commandPayload', 'type': 'object'}, } def __init__( self, **kwargs ): - super(GoogleBigQueryLinkedService, self).__init__(**kwargs) - self.type = 'GoogleBigQuery' - self.project = kwargs['project'] - self.additional_projects = kwargs.get('additional_projects', None) - self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) - self.authentication_type = kwargs['authentication_type'] - self.refresh_token = kwargs.get('refresh_token', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.email = kwargs.get('email', None) - self.key_file_path = kwargs.get('key_file_path', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - + super(DataFlowDebugCommandRequest, self).__init__(**kwargs) + self.session_id = kwargs['session_id'] + self.data_flow_name = kwargs.get('data_flow_name', None) + self.command_name = kwargs.get('command_name', None) + self.command_payload = kwargs['command_payload'] -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugCommandResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. 
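
For context, a hypothetical request body built from the new `DataFlowDebugCommandRequest` model; the session id, command name, and payload keys below are placeholders, not values mandated by the service:

```python
from azure.synapse.artifacts.models import DataFlowDebugCommandRequest

request = DataFlowDebugCommandRequest(
    session_id="<debug-session-id>",          # required: an existing debug session
    data_flow_name="MyMappingDataFlow",
    command_name="executePreviewQuery",       # illustrative command name
    command_payload={"streamName": "source1", "rowLimits": 100},  # required
)
```
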
- :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using database + table - properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type dataset: object + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. + :type data: str """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, } def __init__( self, **kwargs ): - super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) - self.type = 'GoogleBigQueryObject' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.dataset = kwargs.get('dataset', None) - + super(DataFlowDebugCommandResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.data = kwargs.get('data', None) -class GoogleCloudStorageLinkedService(LinkedService): - """Linked service for Google Cloud Storage. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugPackage(msrest.serialization.Model): + """Request body structure for starting data flow debug session. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access - Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access - Management (IAM) user. - :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Google Cloud Storage - Connector. This is an optional property; change it only if you want to try a different service - endpoint or want to switch between https and http. Type: string (or Expression with resultType - string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource + :param datasets: List of datasets. + :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] + :param linked_services: List of linked services. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] + :param staging: Staging info for debug session. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. 
+ :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } def __init__( self, **kwargs ): - super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) - self.type = 'GoogleCloudStorage' - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - self.service_url = kwargs.get('service_url', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(DataFlowDebugPackage, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.session_id = kwargs.get('session_id', None) + self.data_flow = kwargs.get('data_flow', None) + self.datasets = kwargs.get('datasets', None) + self.linked_services = kwargs.get('linked_services', None) + self.staging = kwargs.get('staging', None) + self.debug_settings = kwargs.get('debug_settings', None) -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. - All required parameters must be populated in order to send to Azure. + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object + """ - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. 
- :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } + def __init__( + self, + **kwargs + ): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) + self.source_settings = kwargs.get('source_settings', None) + self.parameters = kwargs.get('parameters', None) + self.dataset_parameters = kwargs.get('dataset_parameters', None) + + +class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): + """Request body structure for data flow preview data. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. + :type row_limits: int + """ + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, } def __init__( self, **kwargs ): - super(GreenplumLinkedService, self).__init__(**kwargs) - self.type = 'Greenplum' - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - + super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.stream_name = kwargs.get('stream_name', None) + self.row_limits = kwargs.get('row_limits', None) -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugQueryResponse(msrest.serialization.Model): + """Response body structure of data flow query for data preview, statistics or expression preview. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
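
A rough sketch of assembling the debug package defined above for an existing session; every name is a placeholder and the `MappingDataFlow` kwargs are elided for brevity:

```python
from azure.synapse.artifacts.models import (
    DataFlowDebugPackage,
    DataFlowDebugPackageDebugSettings,
    DataFlowDebugResource,
    MappingDataFlow,
)

package = DataFlowDebugPackage(
    session_id="<debug-session-id>",
    data_flow=DataFlowDebugResource(
        name="MyMappingDataFlow",
        properties=MappingDataFlow(),  # "properties" is the only required field
    ),
    debug_settings=DataFlowDebugPackageDebugSettings(
        parameters={"window": "1h"},   # data flow parameters for the session
    ),
)
```
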
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param run_id: The run ID of data flow debug session. + :type run_id: str """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, } + def __init__( + self, + **kwargs + ): + super(DataFlowDebugQueryResponse, self).__init__(**kwargs) + self.run_id = kwargs.get('run_id', None) + + +class SubResourceDebugResource(msrest.serialization.Model): + """Azure Synapse nested debug resource. + + :param name: The resource name. + :type name: str + """ + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, **kwargs ): - super(GreenplumTableDataset, self).__init__(**kwargs) - self.type = 'GreenplumTable' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + super(SubResourceDebugResource, self).__init__(**kwargs) + self.name = kwargs.get('name', None) -class HBaseLinkedService(LinkedService): - """HBase server linked service. +class DataFlowDebugResource(SubResourceDebugResource): + """Data flow debug resource. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: object - :param port: The TCP port that the HBase instance uses to listen for client connections. The - default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version). - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use to connect to the - HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param name: The resource name. + :type name: str + :param properties: Required. Data flow properties. 
+ :type properties: ~azure.synapse.artifacts.models.DataFlow """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, } def __init__( self, **kwargs ): - super(HBaseLinkedService, self).__init__(**kwargs) - self.type = 'HBase' - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(DataFlowDebugResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] -class HBaseObjectDataset(Dataset): - """HBase server dataset. +class DataFlowDebugResultResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. - All required parameters must be populated in order to send to Azure. + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. + :type data: str + """ - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowDebugResultResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.data = kwargs.get('data', None) + + +class DataFlowDebugSessionInfo(msrest.serialization.Model): + """Data flow debug session info. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param compute_type: Compute type of the cluster. + :type compute_type: str + :param core_count: Core count of the cluster. + :type core_count: int + :param node_count: Node count of the cluster. (deprecated property). + :type node_count: int + :param integration_runtime_name: Attached integration runtime name of data flow debug session. + :type integration_runtime_name: str + :param session_id: The ID of data flow debug session. + :type session_id: str + :param start_time: Start time of data flow debug session. + :type start_time: str + :param time_to_live_in_minutes: Time to live (in minutes) setting of the cluster. + :type time_to_live_in_minutes: int + :param last_activity_time: Last activity time of data flow debug session.
+ :type last_activity_time: str """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'node_count': {'key': 'nodeCount', 'type': 'int'}, + 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, + 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, } + def __init__( + self, + **kwargs + ): + super(DataFlowDebugSessionInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.compute_type = kwargs.get('compute_type', None) + self.core_count = kwargs.get('core_count', None) + self.node_count = kwargs.get('node_count', None) + self.integration_runtime_name = kwargs.get('integration_runtime_name', None) + self.session_id = kwargs.get('session_id', None) + self.start_time = kwargs.get('start_time', None) + self.time_to_live_in_minutes = kwargs.get('time_to_live_in_minutes', None) + self.last_activity_time = kwargs.get('last_activity_time', None) + + +class DataFlowDebugStatisticsRequest(msrest.serialization.Model): + """Request body structure for data flow statistics. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param columns: List of column names. + :type columns: list[str] + """ + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'columns': {'key': 'columns', 'type': '[str]'}, } def __init__( self, **kwargs ): - super(HBaseObjectDataset, self).__init__(**kwargs) - self.type = 'HBaseObject' - self.table_name = kwargs.get('table_name', None) + super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.stream_name = kwargs.get('stream_name', None) + self.columns = kwargs.get('columns', None) -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. +class DataFlowFolder(msrest.serialization.Model): + """The folder that this data flow is in. If not specified, Data flow will appear at the root level. + + :param name: The name of the folder that this data flow is in. 
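
An illustrative statistics request against a running debug session, using the model above (ids and column names are placeholders):

```python
from azure.synapse.artifacts.models import DataFlowDebugStatisticsRequest

stats_request = DataFlowDebugStatisticsRequest(
    session_id="<debug-session-id>",
    data_flow_name="MyMappingDataFlow",
    stream_name="sink1",                # output stream to profile
    columns=["customer_id", "amount"],  # columns to compute statistics for
)
```
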
+ :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class DataFlowListResponse(msrest.serialization.Model): + """A list of data flow resources. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the HDFS. Possible values - are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Windows authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param value: Required. List of data flows. + :type value: list[~azure.synapse.artifacts.models.DataFlowResource] + :param next_link: The link to the next page of results, if any remaining results exist. 
+ :type next_link: str """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'value': {'key': 'value', 'type': '[DataFlowResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): - super(HdfsLinkedService, self).__init__(**kwargs) - self.type = 'Hdfs' - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) + super(DataFlowListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) -class HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. +class DataFlowReference(msrest.serialization.Model): + """Data flow reference type. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. 
- :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster - is with ESP (Enterprise Security Package). - :type query_timeout: int + :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". + :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType + :param reference_name: Required. Reference data flow name. + :type reference_name: str + :param dataset_parameters: Reference data flow parameters from dataset. + :type dataset_parameters: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, + 'reference_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, - 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, - 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HDInsightHiveActivity, self).__init__(**kwargs) - self.type = 'HDInsightHive' - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) - self.variables = kwargs.get('variables', None) - self.query_timeout = kwargs.get('query_timeout', None) + super(DataFlowReference, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.dataset_parameters = kwargs.get('dataset_parameters', None) -class HDInsightLinkedService(LinkedService): - """HDInsight linked service. +class DataFlowResource(AzureEntityResource): + """Data flow resource type. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to - the HCatalog database. - :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security - Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. - Type: string (or Expression with resultType string). - :type file_system: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow """ _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class Transformation(msrest.serialization.Model): + """A data flow transformation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. 
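For the DataFlowResource above, the caller supplies only the properties payload; id, name, type and etag are read-only and ignored when sending a request. A sketch, assuming MappingDataFlow is the concrete DataFlow subclass available in this models package:

    from azure.synapse.artifacts import models

    resource = models.DataFlowResource(
        # assumed concrete DataFlow subclass; all of its own fields are optional
        properties=models.MappingDataFlow(),
    )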
+ :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HDInsightLinkedService, self).__init__(**kwargs) - self.type = 'HDInsight' - self.cluster_uri = kwargs['cluster_uri'] - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.linked_service_name = kwargs.get('linked_service_name', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.is_esp_enabled = kwargs.get('is_esp_enabled', None) - self.file_system = kwargs.get('file_system', None) + super(Transformation, self).__init__(**kwargs) + self.name = kwargs['name'] + self.description = kwargs.get('description', None) -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. +class DataFlowSink(Transformation): + """Transformation for data flow sink. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Required. Transformation name. :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. + :param description: Transformation description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param class_name: Required. Class name. Type: string (or Expression with resultType string). 
- :type class_name: object - :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). - :type jar_file_path: object - :param jar_linked_service: Jar linked service reference. - :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param jar_libs: Jar libs. - :type jar_libs: list[object] - :param defines: Allows user to specify defines for the MapReduce job request. - :type defines: dict[str, object] + :param dataset: Dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference """ _validation = { 'name': {'required': True}, - 'type': {'required': True}, - 'class_name': {'required': True}, - 'jar_file_path': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, - 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, - 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, - 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, } def __init__( self, **kwargs ): - super(HDInsightMapReduceActivity, self).__init__(**kwargs) - self.type = 'HDInsightMapReduce' - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.class_name = kwargs['class_name'] - self.jar_file_path = kwargs['jar_file_path'] - self.jar_linked_service = kwargs.get('jar_linked_service', None) - self.jar_libs = kwargs.get('jar_libs', None) - self.defines = kwargs.get('defines', None) + super(DataFlowSink, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) -class HDInsightOnDemandLinkedService(LinkedService): - """HDInsight ondemand linked service. +class DataFlowSource(Transformation): + """Transformation for data flow source. All required parameters must be populated in order to send to Azure. + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. 
+ :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowSource, self).__init__(**kwargs) + self.dataset = kwargs.get('dataset', None) + + +class DataFlowSourceSetting(msrest.serialization.Model): + """Definition of data flow source setting for debug. + :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. - Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. - Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity - run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string - (or Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand - cluster for storing and processing data. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: - string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string + :param source_name: The data flow source name. + :type source_name: str + :param row_limit: Defines the row limit of data flow source in debug. + :type row_limit: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_name': {'key': 'sourceName', 'type': 'str'}, + 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowSourceSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_name = kwargs.get('source_name', None) + self.row_limit = kwargs.get('row_limit', None) + + +class DataFlowStagingInfo(msrest.serialization.Model): + """Staging info for execute data flow activity. + + :param linked_service: Staging linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for staging blob. 
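A DataFlowSource is usually paired with a debug-time DataFlowSourceSetting by name; the source and dataset names below are placeholders:

    from azure.synapse.artifacts import models

    source = models.DataFlowSource(
        name='source1',  # required transformation name
        dataset=models.DatasetReference(
            type='DatasetReference',
            reference_name='MyInputDataset',  # placeholder dataset
        ),
    )
    debug_setting = models.DataFlowSourceSetting(
        source_name='source1',  # matches the source defined above
        row_limit=100,          # cap rows returned while debugging
    )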
+ :type folder_path: str + """ + + _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = kwargs.get('linked_service', None) + self.folder_path = kwargs.get('folder_path', None) + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param script_path: Required. Case-sensitive path to the folder that contains the U-SQL script. + Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Required. Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. + Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. + :type degree_of_parallelism: object + :param priority: Determines which jobs out of all that are queued should be selected to run + first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or + Expression with resultType integer), minimum: 1. + :type priority: object + :param parameters: Parameters for U-SQL job request. + :type parameters: dict[str, object] + :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression + with resultType string). + :type runtime_version: object + :param compilation_mode: Compilation mode of U-SQL. Must be one of these values: Semantic, + Full, or SingleBox. Type: string (or Expression with resultType string).
+ :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) + self.type = 'DataLakeAnalyticsU-SQL' # type: str + self.script_path = kwargs['script_path'] + self.script_linked_service = kwargs['script_linked_service'] + self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) + self.priority = kwargs.get('priority', None) + self.parameters = kwargs.get('parameters', None) + self.runtime_version = kwargs.get('runtime_version', None) + self.compilation_mode = kwargs.get('compilation_mode', None) + + +class DataLakeStorageAccountDetails(msrest.serialization.Model): + """Details of the data lake storage account associated with the workspace. + + :param account_url: Account URL. + :type account_url: str + :param filesystem: Filesystem name. + :type filesystem: str + """ + + _attribute_map = { + 'account_url': {'key': 'accountUrl', 'type': 'str'}, + 'filesystem': {'key': 'filesystem', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeStorageAccountDetails, self).__init__(**kwargs) + self.account_url = kwargs.get('account_url', None) + self.filesystem = kwargs.get('filesystem', None) + + +class DatasetCompression(msrest.serialization.Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. 
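For DataLakeAnalyticsUSQLActivity, only name, script_path and script_linked_service are required; the type discriminator is filled in by the constructor itself. A sketch with placeholder names:

    from azure.synapse.artifacts import models

    usql = models.DataLakeAnalyticsUSQLActivity(
        name='RunUSqlScript',                  # placeholder activity name
        script_path='scripts/transform.usql',  # placeholder script path
        script_linked_service=models.LinkedServiceReference(
            type='LinkedServiceReference',
            reference_name='MyAdlaLinkedService',  # placeholder linked service
        ),
        degree_of_parallelism=2,  # optional; service default is 1
        priority=100,             # optional; lower number runs first
    )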
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + } + + def __init__( + self, + **kwargs + ): + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'DatasetCompression' # type: str + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetBZip2Compression, self).__init__(**kwargs) + self.type = 'BZip2' # type: str + + +class DatasetDataElement(msrest.serialization.Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetDataElement, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class DatasetDebugResource(SubResourceDebugResource): + """Dataset debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetDebugResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". 
+ :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetDeflateCompression, self).__init__(**kwargs) + self.type = 'Deflate' # type: str + self.level = kwargs.get('level', None) + + +class DatasetFolder(msrest.serialization.Model): + """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetGZipCompression, self).__init__(**kwargs) + self.type = 'GZip' # type: str + self.level = kwargs.get('level', None) + + +class DatasetListResponse(msrest.serialization.Model): + """A list of dataset resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of datasets. + :type value: list[~azure.synapse.artifacts.models.DatasetResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DatasetResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class DatasetReference(msrest.serialization.Model): + """Dataset reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Dataset reference type. Possible values include: "DatasetReference". + :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. 
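Each compression subclass fills in its own type discriminator, so callers pass at most the optional level; on deserialization, the _subtype_map on DatasetCompression routes the payload's type value to the matching subclass. For example:

    from azure.synapse.artifacts import models

    compression = models.DatasetGZipCompression(level='Optimal')
    assert compression.type == 'GZip'  # constant set by the subclass itself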
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class DatasetResource(AzureEntityResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class DatasetSchemaDataElement(msrest.serialization.Model): + """Columns that define the physical type schema of the dataset. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Name of the schema column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the schema column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetSchemaDataElement, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". 
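DatasetReference follows the same pattern as DataFlowReference: both the discriminator and the target name are required, and parameters is an optional dict of arguments (values below are placeholders):

    from azure.synapse.artifacts import models

    dataset_ref = models.DatasetReference(
        type='DatasetReference',
        reference_name='MyDataset',         # placeholder dataset name
        parameters={'folder': 'raw/2020'},  # placeholder dataset arguments
    )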
+ :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetZipDeflateCompression, self).__init__(**kwargs) + self.type = 'ZipDeflate' # type: str + self.level = kwargs.get('level', None) + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param server: Required. Server name for connection. Type: string (or Expression with + resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. Possible values + include: "Basic". + :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression with resultType + string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param package_collection: The package collection in which packages are created when querying + the database. Type: string (or Expression with resultType string). + :type package_collection: object + :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or + Expression with resultType string). + :type certificate_common_name: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, + 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(Db2LinkedService, self).__init__(**kwargs) + self.type = 'Db2' # type: str + self.server = kwargs['server'] + self.database = kwargs['database'] + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.package_collection = kwargs.get('package_collection', None) + self.certificate_common_name = kwargs.get('certificate_common_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class Db2Source(TabularSource): + """A copy activity source for Db2 databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
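For Db2LinkedService only server and database are required; the password is any SecretBase subclass. A sketch assuming the SecureString model from this package, with placeholder connection values:

    from azure.synapse.artifacts import models

    db2 = models.Db2LinkedService(
        server='db2.example.internal',  # placeholder host
        database='SAMPLE',              # placeholder database name
        authentication_type='Basic',
        username='db2user',             # placeholder user
        password=models.SecureString(value='<secret>'),  # assumed SecretBase subclass
    )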
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(Db2Source, self).__init__(**kwargs) + self.type = 'Db2Source' # type: str + self.query = kwargs.get('query', None) + + +class Db2TableDataset(Dataset): + """The Db2 table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with + resultType string). + :type schema_type_properties_schema: object + :param table: The Db2 table name. Type: string (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(Db2TableDataset, self).__init__(**kwargs) + self.type = 'Db2Table' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) + + +class DeleteActivity(ExecutionActivity): + """Delete activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param recursive: If true, files or sub-folders under the current folder path will be deleted + recursively. Default is false. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param max_concurrent_connections: The maximum number of concurrent connections used to connect + to the data source at the same time. + :type max_concurrent_connections: int + :param enable_logging: Whether to record detailed logs of delete-activity execution. Default + value is false. Type: boolean (or Expression with resultType boolean). + :type enable_logging: object + :param log_storage_settings: Log storage settings the customer needs to provide when + enableLogging is true. + :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings + :param dataset: Required. Delete activity dataset reference.
+ :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'max_concurrent_connections': {'minimum': 1}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, + 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + **kwargs + ): + super(DeleteActivity, self).__init__(**kwargs) + self.type = 'Delete' # type: str + self.recursive = kwargs.get('recursive', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.enable_logging = kwargs.get('enable_logging', None) + self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.dataset = kwargs['dataset'] + + +class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): + """Request body structure for deleting data flow debug session. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
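DeleteActivity requires a name and a dataset reference, and its _validation map declares a minimum of 1 for max_concurrent_connections. A sketch with placeholder names:

    from azure.synapse.artifacts import models

    delete = models.DeleteActivity(
        name='CleanupStaging',  # placeholder activity name
        dataset=models.DatasetReference(
            type='DatasetReference',
            reference_name='StagingFolder',  # placeholder dataset
        ),
        recursive=True,                # also delete sub-folders
        max_concurrent_connections=1,  # minimum declared value
    )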
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the delimited text storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless the BOM denotes another Unicode encoding. Refer to the name + column of the table in the following link for supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", + "snappy", "lz4". + :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec + :param compression_level: The data compression method used for DelimitedText. Possible values + include: "Optimal", "Fastest". + :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param quote_char: The quote character. Type: string (or Expression with resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of data as headers. When + used as output, write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with resultType string).
+ :type null_value: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.type = 'DelimitedText' # type: str + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + + +class FormatReadSettings(msrest.serialization.Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DelimitedTextReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} + } + + def __init__( + self, + **kwargs + ): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'FormatReadSettings' # type: str + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
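For DelimitedTextDataset only the linked service reference is mandatory; the remaining properties refine parsing. A sketch with placeholder names:

    from azure.synapse.artifacts import models

    csv_dataset = models.DelimitedTextDataset(
        linked_service_name=models.LinkedServiceReference(
            type='LinkedServiceReference',
            reference_name='MyStorageLinkedService',  # placeholder linked service
        ),
        column_delimiter=',',
        first_row_as_header=True,  # treat the first row as headers
        null_value='NULL',         # string that represents null
    )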
+ :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from + input files. Type: integer (or Expression with resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextReadSettings, self).__init__(**kwargs) + self.type = 'DelimitedTextReadSettings' # type: str + self.skip_line_count = kwargs.get('skip_line_count', None) + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. + :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextSink, self).__init__(**kwargs) + self.type = 'DelimitedTextSink' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: DelimitedText format settings. + :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextSource, self).__init__(**kwargs) + self.type = 'DelimitedTextSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param quote_all_text: Indicates whether string values should always be enclosed with quotes. + Type: boolean (or Expression with resultType boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the files. Type: string (or + Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DelimitedTextWriteSettings, self).__init__(**kwargs) + self.type = 'DelimitedTextWriteSettings' # type: str + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs['file_extension'] + + +class DependencyReference(msrest.serialization.Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. 
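In a copy activity, the delimited-text read settings hang off the source and the write settings off the sink; file_extension is the one required write setting. For example:

    from azure.synapse.artifacts import models

    source = models.DelimitedTextSource(
        format_settings=models.DelimitedTextReadSettings(skip_line_count=1),  # skip one header row
    )
    write_settings = models.DelimitedTextWriteSettings(file_extension='.csv')  # required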
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. The type of dependency reference.Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DependencyReference, self).__init__(**kwargs)
+        self.type = None  # type: Optional[str]
+
+
+class DistcpSettings(msrest.serialization.Model):
+    """Distcp settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type:
+     string (or Expression with resultType string).
+    :type resource_manager_endpoint: object
+    :param temp_script_path: Required. Specifies an existing folder path used to store the temp
+     Distcp command script. The script file is generated by ADF and is removed after the copy job
+     finishes. Type: string (or Expression with resultType string).
+    :type temp_script_path: object
+    :param distcp_options: Specifies the Distcp options. Type: string (or Expression with
+     resultType string).
+    :type distcp_options: object
+    """
+
+    _validation = {
+        'resource_manager_endpoint': {'required': True},
+        'temp_script_path': {'required': True},
+    }
+
+    _attribute_map = {
+        'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'},
+        'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'},
+        'distcp_options': {'key': 'distcpOptions', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DistcpSettings, self).__init__(**kwargs)
+        self.resource_manager_endpoint = kwargs['resource_manager_endpoint']
+        self.temp_script_path = kwargs['temp_script_path']
+        self.distcp_options = kwargs.get('distcp_options', None)
+
+
+class DocumentDbCollectionDataset(Dataset):
+    """Microsoft Azure Document Database Collection dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset.Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+     with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+     Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+     root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param collection_name: Required.
Document Database collection name. Type: string (or + Expression with resultType string). + :type collection_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentDbCollectionDataset, self).__init__(**kwargs) + self.type = 'DocumentDbCollection' # type: str + self.collection_name = kwargs['collection_name'] + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentDbCollectionSink, self).__init__(**kwargs) + self.type = 'DocumentDbCollectionSink' # type: str + self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Documents query. Type: string (or Expression with resultType string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or Expression with + resultType string). + :type nesting_separator: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentDbCollectionSource, self).__init__(**kwargs) + self.type = 'DocumentDbCollectionSource' # type: str + self.query = kwargs.get('query', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.query_timeout = kwargs.get('query_timeout', None) + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. 
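+
+    A minimal construction sketch (the connection string below is a placeholder,
+    not a working DSN); flattened keys such as ``typeProperties.connectionString``
+    in ``_attribute_map`` are nested on serialization:
+
+    .. code-block:: python
+
+        from azure.synapse.artifacts import models
+
+        drill_ls = models.DrillLinkedService(
+            connection_string='Host=drill.contoso.example;Port=31010',  # placeholder
+        )
+        body = drill_ls.serialize()
+        # body == {'type': 'Drill', 'typeProperties': {'connectionString': ...}}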
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DrillLinkedService, self).__init__(**kwargs) + self.type = 'Drill' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class DrillSource(TabularSource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DrillSource, self).__init__(**kwargs) + self.type = 'DrillSource' # type: str + self.query = kwargs.get('query', None) + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DrillTableDataset, self).__init__(**kwargs) + self.type = 'DrillTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + + +class DWCopyCommandDefaultValue(msrest.serialization.Model): + """Default value. + + :param column_name: Column name. Type: object (or Expression with resultType string). + :type column_name: object + :param default_value: The default value of the column. Type: object (or Expression with + resultType string). + :type default_value: object + """ + + _attribute_map = { + 'column_name': {'key': 'columnName', 'type': 'object'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + self.column_name = kwargs.get('column_name', None) + self.default_value = kwargs.get('default_value', None) + + +class DWCopyCommandSettings(msrest.serialization.Model): + """DW Copy Command settings. + + :param default_values: Specifies the default values for each target column in SQL DW. The + default values in the property overwrite the DEFAULT constraint set in the DB, and identity + column cannot have a default value. Type: array of objects (or Expression with resultType array + of objects). + :type default_values: list[~azure.synapse.artifacts.models.DWCopyCommandDefaultValue] + :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: + key value pairs (value should be string type) (or Expression with resultType object). Example: + "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. + :type additional_options: dict[str, str] + """ + + _attribute_map = { + 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(DWCopyCommandSettings, self).__init__(**kwargs) + self.default_values = kwargs.get('default_values', None) + self.additional_options = kwargs.get('additional_options', None) + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
Type of linked service.Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData
+     endpoint.
+    :type url: object
+    :param service_principal_id: Required. Specify the application's client ID. Type: string (or
+     Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: Required. Specify the application's key. Mark this field as a
+     SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key
+     Vault. Type: string (or Expression with resultType string).
+    :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase
+    :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which
+     your application resides. Retrieve it by hovering the mouse in the top-right corner of the
+     Azure portal. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param aad_resource_id: Required. Specify the resource for which you are requesting
+     authorization. Type: string (or Expression with resultType string).
+    :type aad_resource_id: object
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'url': {'required': True},
+        'service_principal_id': {'required': True},
+        'service_principal_key': {'required': True},
+        'tenant': {'required': True},
+        'aad_resource_id': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'url': {'key': 'typeProperties.url', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+        'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DynamicsAXLinkedService, self).__init__(**kwargs)
+        self.type = 'DynamicsAX'  # type: str
+        self.url = kwargs['url']
+        self.service_principal_id = kwargs['service_principal_id']
+        self.service_principal_key = kwargs['service_principal_key']
+        self.tenant = kwargs['tenant']
+        self.aad_resource_id = kwargs['aad_resource_id']
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class DynamicsAXResourceDataset(Dataset):
+    """The path of the Dynamics AX OData entity.
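+
+    A construction sketch with only the required fields (reference and path names
+    are placeholders):
+
+    .. code-block:: python
+
+        from azure.synapse.artifacts import models
+
+        ds = models.DynamicsAXResourceDataset(
+            linked_service_name=models.LinkedServiceReference(
+                type='LinkedServiceReference',
+                reference_name='DynamicsAxLinkedService',  # placeholder
+            ),
+            path='Customers',  # placeholder OData entity path
+        )
+        ds.validate()  # checks the _validation rules client-side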
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + with resultType string). + :type path: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.type = 'DynamicsAXResource' # type: str + self.path = kwargs['path'] + + +class DynamicsAXSource(TabularSource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsAXSource, self).__init__(**kwargs) + self.type = 'DynamicsAXSource' # type: str + self.query = kwargs.get('query', None) + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). + :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmEntityDataset, self).__init__(**kwargs) + self.type = 'DynamicsCrmEntity' # type: str + self.entity_name = kwargs.get('entity_name', None) + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. 
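+
+    A hedged sketch of the common online scenario (endpoint and credentials are
+    placeholders); ``deployment_type`` and ``authentication_type`` are the two
+    required type properties:
+
+    .. code-block:: python
+
+        from azure.synapse.artifacts import models
+
+        crm_ls = models.DynamicsCrmLinkedService(
+            deployment_type='Online',
+            authentication_type='Office365',
+            service_uri='https://contoso.crm.dynamics.com',  # placeholder
+            username='integration@contoso.example',          # placeholder
+            password=models.SecureString(value='<secret>'),  # placeholder
+        )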
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' + for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: + string (or Expression with resultType string). Possible values include: "Online", + "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Type: string (or Expression with resultType + string). + :type host_name: object + :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem + and not allowed for online. Default is 443. Type: integer (or Expression with resultType + integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM instance. The property is + required for on-prem and required for online when there are more than one Dynamics CRM + instances associated with the user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect to Dynamics CRM + server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, + 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or + Expression with resultType string). Possible values include: "Office365", "Ifd", + "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Dynamics CRM instance. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". 
+ :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmLinkedService, self).__init__(**kwargs) + self.type = 'DynamicsCrm' # type: str + self.deployment_type = kwargs['deployment_type'] + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None) + self.service_principal_credential = kwargs.get('service_principal_credential', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmSink, self).__init__(**kwargs) + self.type = 'DynamicsCrmSink' # type: str + self.write_behavior = kwargs['write_behavior'] + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.alternate_key_name = kwargs.get('alternate_key_name', None) + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsCrmSource, self).__init__(**kwargs) + self.type = 'DynamicsCrmSource' # type: str + self.query = kwargs.get('query', None) + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). 
+ :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsEntityDataset, self).__init__(**kwargs) + self.type = 'DynamicsEntity' # type: str + self.entity_name = kwargs.get('entity_name', None) + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or + Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics server. The property is required + for on-prem and not allowed for online. Type: string (or Expression with resultType string). + :type host_name: str + :param port: The port of on-premises Dynamics server. The property is required for on-prem and + not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), + minimum: 0. + :type port: str + :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- + line and not allowed for on-prem. Type: string (or Expression with resultType string). + :type service_uri: str + :param organization_name: The organization name of the Dynamics instance. The property is + required for on-prem and required for online when there are more than one Dynamics instances + associated with the user. Type: string (or Expression with resultType string). + :type organization_name: str + :param authentication_type: Required. The authentication type to connect to Dynamics server. + 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' + for Server-To-Server authentication in online scenario. Type: string (or Expression with + resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". 
+ :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Dynamics instance. Type: string (or Expression with + resultType string). + :type username: object + :param password: Password to access the Dynamics instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, + 'port': {'key': 'typeProperties.port', 'type': 'str'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsLinkedService, self).__init__(**kwargs) + self.type = 'Dynamics' # type: str + self.deployment_type = kwargs['deployment_type'] + self.host_name = kwargs.get('host_name', None) + self.port = 
kwargs.get('port', None)
+        self.service_uri = kwargs.get('service_uri', None)
+        self.organization_name = kwargs.get('organization_name', None)
+        self.authentication_type = kwargs['authentication_type']
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_credential_type = kwargs.get('service_principal_credential_type', None)
+        self.service_principal_credential = kwargs.get('service_principal_credential', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class DynamicsSink(CopySink):
+    """A copy activity Dynamics sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy sink type.Constant filled by server.
+    :type type: str
+    :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+     integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+     integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param write_behavior: Required. The write behavior for the operation. Possible values include:
+     "Upsert".
+    :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior
+    :param ignore_null_values: The flag indicating whether to ignore null values from input dataset
+     (except key fields) during write operation. Default is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type ignore_null_values: object
+    :param alternate_key_name: The logical name of the alternate key which will be used when
+     upserting records. Type: string (or Expression with resultType string).
+ :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsSink, self).__init__(**kwargs) + self.type = 'DynamicsSink' # type: str + self.write_behavior = kwargs['write_behavior'] + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.alternate_key_name = kwargs.get('alternate_key_name', None) + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DynamicsSource, self).__init__(**kwargs) + self.type = 'DynamicsSource' # type: str + self.query = kwargs.get('query', None) + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. 
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param endpoint: Required. The endpoint of the Eloqua server. (e.g. eloqua.example.com).
+    :type endpoint: object
+    :param username: Required. The site name and user name of your Eloqua account in the form:
+     sitename/username. (e.g. Eloqua/Alice).
+    :type username: object
+    :param password: The password corresponding to the user name.
+    :type password: ~azure.synapse.artifacts.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using
+     HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name in the server's
+     certificate to match the host name of the server when connecting over SSL. The default value is
+     true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of the server when
+     connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'endpoint': {'required': True},
+        'username': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(EloquaLinkedService, self).__init__(**kwargs)
+        self.type = 'Eloqua'  # type: str
+        self.endpoint = kwargs['endpoint']
+        self.username = kwargs['username']
+        self.password = kwargs.get('password', None)
+        self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+        self.use_host_verification = kwargs.get('use_host_verification', None)
+        self.use_peer_verification = kwargs.get('use_peer_verification', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class EloquaObjectDataset(Dataset):
+    """Eloqua server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset.Constant filled by server.
+ :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(EloquaObjectDataset, self).__init__(**kwargs) + self.type = 'EloquaObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class EloquaSource(TabularSource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(EloquaSource, self).__init__(**kwargs) + self.type = 'EloquaSource' # type: str + self.query = kwargs.get('query', None) + + +class EntityReference(msrest.serialization.Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: object + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorContract(msrest.serialization.Model): + """Contains details when the response code indicates an error. + + :param error: The error details. + :type error: ~azure.synapse.artifacts.models.ErrorResponse + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorContract, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorResponse(msrest.serialization.Model): + """The resource management error response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.synapse.artifacts.models.ErrorResponse] + :ivar additional_info: The error additional info. 
+ :vartype additional_info: list[~azure.synapse.artifacts.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorResponse]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. + :type row_limits: int + :param expression: The expression for preview. + :type expression: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.data_flow_name = kwargs.get('data_flow_name', None) + self.stream_name = kwargs.get('stream_name', None) + self.row_limits = kwargs.get('row_limits', None) + self.expression = kwargs.get('expression', None) + + +class ExecuteDataFlowActivity(ExecutionActivity): + """Execute data flow activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param data_flow: Required. Data flow reference. + :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference + :param staging: Staging info for execute data flow activity. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param integration_runtime: The integration runtime reference. + :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param compute: Compute properties for data flow activity. 
+ :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'data_flow': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, + 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, + 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecuteDataFlowActivity, self).__init__(**kwargs) + self.type = 'ExecuteDataFlow' # type: str + self.data_flow = kwargs['data_flow'] + self.staging = kwargs.get('staging', None) + self.integration_runtime = kwargs.get('integration_runtime', None) + self.compute = kwargs.get('compute', None) + + +class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): + """Compute properties for data flow activity. + + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. + :type core_count: int + """ + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) + self.compute_type = kwargs.get('compute_type', None) + self.core_count = kwargs.get('core_count', None) + + +class ExecutePipelineActivity(Activity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.synapse.artifacts.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait for the dependent + pipeline execution to finish. Default is false. 
+ :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecutePipelineActivity, self).__init__(**kwargs) + self.type = 'ExecutePipeline' # type: str + self.pipeline = kwargs['pipeline'] + self.parameters = kwargs.get('parameters', None) + self.wait_on_completion = kwargs.get('wait_on_completion', None) + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + "x64". Type: string (or Expression with resultType string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: string (or Expression + with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. Type: string (or + Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the SSIS package. + :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the SSIS package. + :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers to execute the SSIS + package. 
+ :type project_connection_managers: dict[str, object] + :param package_connection_managers: The package level connection managers to execute the SSIS + package. + :type package_connection_managers: dict[str, object] + :param property_overrides: The property overrides to execute the SSIS package. + :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__( + self, + **kwargs + ): + super(ExecuteSSISPackageActivity, self).__init__(**kwargs) + self.type = 'ExecuteSSISPackage' # type: str + self.package_location = kwargs['package_location'] + self.runtime = kwargs.get('runtime', None) + self.logging_level = kwargs.get('logging_level', None) + self.environment_path = kwargs.get('environment_path', None) + self.execution_credential = kwargs.get('execution_credential', None) + self.connect_via = kwargs['connect_via'] + self.project_parameters = kwargs.get('project_parameters', None) + self.package_parameters = kwargs.get('package_parameters', None) + self.project_connection_managers = kwargs.get('project_connection_managers', None) + self.package_connection_managers = kwargs.get('package_connection_managers', None) + self.property_overrides = kwargs.get('property_overrides', None) + self.log_location = kwargs.get('log_location', None) + + +class ExposureControlRequest(msrest.serialization.Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. 
+    :type feature_type: str
+    """
+
+    _attribute_map = {
+        'feature_name': {'key': 'featureName', 'type': 'str'},
+        'feature_type': {'key': 'featureType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ExposureControlRequest, self).__init__(**kwargs)
+        self.feature_name = kwargs.get('feature_name', None)
+        self.feature_type = kwargs.get('feature_type', None)
+
+
+class ExposureControlResponse(msrest.serialization.Model):
+    """The exposure control response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar feature_name: The feature name.
+    :vartype feature_name: str
+    :ivar value: The feature value.
+    :vartype value: str
+    """
+
+    _validation = {
+        'feature_name': {'readonly': True},
+        'value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'feature_name': {'key': 'featureName', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ExposureControlResponse, self).__init__(**kwargs)
+        self.feature_name = None
+        self.value = None
+
+
+class Expression(msrest.serialization.Model):
+    """Azure Synapse expression definition.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Expression type. Possible values include: "Expression".
+    :type type: str or ~azure.synapse.artifacts.models.ExpressionType
+    :param value: Required. Expression value.
+    :type value: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Expression, self).__init__(**kwargs)
+        self.type = kwargs['type']
+        self.value = kwargs['value']
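+
+
+# A minimal usage sketch (not part of the generated code): Expression instances are the
+# values consumed by control-flow activities such as FilterActivity and ForEachActivity
+# further below. Both fields are required; the pipeline parameter name is illustrative.
+#
+#   expr = Expression(type='Expression', value="@pipeline().parameters.files")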
+
+
+class FileServerLinkedService(LinkedService):
+    """File system linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param host: Required. Host name of the server. Type: string (or Expression with resultType
+     string).
+    :type host: object
+    :param user_id: User ID to logon the server. Type: string (or Expression with resultType
+     string).
+    :type user_id: object
+    :param password: Password to logon the server.
+    :type password: ~azure.synapse.artifacts.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(FileServerLinkedService, self).__init__(**kwargs)
+        self.type = 'FileServer'  # type: str
+        self.host = kwargs['host']
+        self.user_id = kwargs.get('user_id', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class FileServerLocation(DatasetLocation):
+    """The location of file server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location. Constant filled by server.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or Expression with
+     resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType
+     string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(FileServerLocation, self).__init__(**kwargs)
+        self.type = 'FileServerLocation'  # type: str
+
+
+class FileServerReadSettings(StoreReadSettings):
+    """File server read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type. Constant filled by server.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read recursively. Default is
+     true. Type: boolean (or Expression with resultType boolean).
+    :type recursive: object
+    :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with
+     resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with
+     resultType string).
+    :type wildcard_file_name: object
+    :param enable_partition_discovery: Indicates whether to enable partition discovery.
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServerReadSettings, self).__init__(**kwargs) + self.type = 'FileServerReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FileServerWriteSettings, self).__init__(**kwargs) + self.type = 'FileServerWriteSettings' # type: str + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+     integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(FileSystemSink, self).__init__(**kwargs)
+        self.type = 'FileSystemSink'  # type: str
+        self.copy_behavior = kwargs.get('copy_behavior', None)
+
+
+class FileSystemSource(CopySource):
+    """A copy activity file system source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type. Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read recursively. Default is
+     true. Type: boolean (or Expression with resultType boolean).
+    :type recursive: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(FileSystemSource, self).__init__(**kwargs)
+        self.type = 'FileSystemSource'  # type: str
+        self.recursive = kwargs.get('recursive', None)
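+
+
+# A minimal usage sketch (not part of the generated code): pair the file system source
+# and sink defined above for a copy activity. 'PreserveHierarchy' is one illustrative
+# copyBehavior value; both copy_behavior and recursive also accept expression objects.
+#
+#   source = FileSystemSource(recursive=True)
+#   sink = FileSystemSink(copy_behavior='PreserveHierarchy')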
+
+
+class FilterActivity(Activity):
+    """Filter and return results from input array based on the conditions.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param type: Required. Type of activity. Constant filled by server.
+    :type type: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+    :param items: Required. Input array on which filter should be applied.
+    :type items: ~azure.synapse.artifacts.models.Expression
+    :param condition: Required. Condition to be used for filtering the input.
+    :type condition: ~azure.synapse.artifacts.models.Expression
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'items': {'required': True},
+        'condition': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+        'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(FilterActivity, self).__init__(**kwargs)
+        self.type = 'Filter'  # type: str
+        self.items = kwargs['items']
+        self.condition = kwargs['condition']
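+
+
+# A minimal usage sketch (not part of the generated code; parameter names and the
+# condition are illustrative): keep only the array items that satisfy a condition.
+#
+#   filter_files = FilterActivity(
+#       name='FilterLargeFiles',
+#       items=Expression(type='Expression', value="@pipeline().parameters.files"),
+#       condition=Expression(type='Expression', value="@greater(item().size, 1024)"),
+#   )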
+
+
+class ForEachActivity(Activity):
+    """This activity is used for iterating over a collection and executing given activities.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param type: Required. Type of activity. Constant filled by server.
+    :type type: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+    :param is_sequential: Should the loop be executed in sequence or in parallel (max 50).
+    :type is_sequential: bool
+    :param batch_count: Batch count to be used for controlling the number of parallel executions
+     (when isSequential is set to false).
+    :type batch_count: int
+    :param items: Required. Collection to iterate.
+    :type items: ~azure.synapse.artifacts.models.Expression
+    :param activities: Required. List of activities to execute.
+    :type activities: list[~azure.synapse.artifacts.models.Activity]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'batch_count': {'maximum': 50},
+        'items': {'required': True},
+        'activities': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
+        'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
+        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+        'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ForEachActivity, self).__init__(**kwargs)
+        self.type = 'ForEach'  # type: str
+        self.is_sequential = kwargs.get('is_sequential', None)
+        self.batch_count = kwargs.get('batch_count', None)
+        self.items = kwargs['items']
+        self.activities = kwargs['activities']
+
+
+class FtpReadSettings(StoreReadSettings):
+    """FTP read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type. Constant filled by server.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read recursively. Default is
+     true. Type: boolean (or Expression with resultType boolean).
+    :type recursive: object
+    :param wildcard_folder_path: FTP wildcardFolderPath. Type: string (or Expression with
+     resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: FTP wildcardFileName. Type: string (or Expression with resultType
+     string).
+    :type wildcard_file_name: object
+    :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores.
+    :type use_binary_transfer: bool
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(FtpReadSettings, self).__init__(**kwargs)
+        self.type = 'FtpReadSettings'  # type: str
+        self.recursive = kwargs.get('recursive', None)
+        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+        self.use_binary_transfer = kwargs.get('use_binary_transfer', None)
+
+
+class FtpServerLinkedService(LinkedService):
+    """An FTP server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType + string). + :type host: object + :param port: The TCP port number that the FTP server uses to listen for client connections. + Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param authentication_type: The authentication type to be used to connect to the FTP server. + Possible values include: "Basic", "Anonymous". + :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType + :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to logon the FTP server. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_ssl: object + :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate + when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with + resultType boolean). 
+ :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FtpServerLinkedService, self).__init__(**kwargs) + self.type = 'FtpServer' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(FtpServerLocation, self).__init__(**kwargs) + self.type = 'FtpServerLocation' # type: str + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. + :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__( + self, + **kwargs + ): + super(GetMetadataActivity, self).__init__(**kwargs) + self.type = 'GetMetadata' # type: str + self.dataset = kwargs['dataset'] + self.field_list = kwargs.get('field_list', None) + + +class GetSsisObjectMetadataRequest(msrest.serialization.Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = kwargs.get('metadata_path', None) + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_customer_id: Required. The Client customer ID of the AdWords account that you + want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the manager account that + you use to grant access to the AdWords API. + :type developer_token: ~azure.synapse.artifacts.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. 
ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords + for UserAuthentication. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.type = 'GoogleAdWords' # type: str + self.client_customer_id = kwargs['client_customer_id'] + self.developer_token = kwargs['developer_token'] + self.authentication_type = kwargs['authentication_type'] + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.type = 'GoogleAdWordsObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class GoogleAdWordsSource(TabularSource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.type = 'GoogleAdWordsSource' # type: str + self.query = kwargs.get('query', None) + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + Drive access enables support for federated tables that combine BigQuery data with data from + Google Drive. The default value is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery + for UserAuthentication. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleBigQueryLinkedService, self).__init__(**kwargs) + self.type = 'GoogleBigQuery' # type: str + self.project = kwargs['project'] + self.additional_projects = kwargs.get('additional_projects', None) + self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) + self.authentication_type = kwargs['authentication_type'] + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
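# A usage sketch for the GoogleBigQueryLinkedService above, assuming SecureString is
# the concrete SecretBase implementation in these models; values are placeholders.
from azure.synapse.artifacts.models import GoogleBigQueryLinkedService, SecureString

bq_ls = GoogleBigQueryLinkedService(
    project="my-default-project",              # required
    authentication_type="UserAuthentication",  # required; see possible values above
    refresh_token=SecureString(value="<refresh-token>"),
    client_id="<oauth-client-id>",
    client_secret=SecureString(value="<oauth-client-secret>"),
)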
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using database + table + properties instead. + :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or Expression with + resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or Expression with + resultType string). + :type dataset: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) + self.type = 'GoogleBigQueryObject' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.dataset = kwargs.get('dataset', None) + + +class GoogleBigQuerySource(TabularSource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
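# A sketch showing how the GoogleBigQueryObjectDataset above might be populated with
# the preferred dataset/table pair instead of the retired table_name; the
# LinkedServiceReference construction is an assumption (that model is defined
# elsewhere in this file).
from azure.synapse.artifacts.models import GoogleBigQueryObjectDataset, LinkedServiceReference

bq_ds = GoogleBigQueryObjectDataset(
    linked_service_name=LinkedServiceReference(            # required
        type="LinkedServiceReference", reference_name="GoogleBigQueryLS"),
    dataset="my_dataset",   # serialized as typeProperties.dataset
    table="my_table",       # serialized as typeProperties.table
)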
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleBigQuerySource, self).__init__(**kwargs) + self.type = 'GoogleBigQuerySource' # type: str + self.query = kwargs.get('query', None) + + +class GoogleCloudStorageLinkedService(LinkedService): + """Linked service for Google Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access + Management (IAM) user. + :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Google Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
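# A sketch for the GoogleBigQuerySource above; query_timeout follows the timespan
# pattern documented in the docstring, and both values are placeholders.
from azure.synapse.artifacts.models import GoogleBigQuerySource

bq_source = GoogleBigQuerySource(
    query="SELECT * FROM my_dataset.my_table",
    query_timeout="02:00:00",
)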
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) + self.type = 'GoogleCloudStorage' # type: str + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GoogleCloudStorageLocation(DatasetLocation): + """The location of Google Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with + resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleCloudStorageLocation, self).__init__(**kwargs) + self.type = 'GoogleCloudStorageLocation' # type: str + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) + + +class GoogleCloudStorageReadSettings(StoreReadSettings): + """Google Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
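# Sketches for the GoogleCloudStorageLinkedService and GoogleCloudStorageLocation
# above; SecureString as the secret type is an assumption, values are placeholders.
from azure.synapse.artifacts.models import (
    GoogleCloudStorageLinkedService,
    GoogleCloudStorageLocation,
    SecureString,
)

gcs_ls = GoogleCloudStorageLinkedService(
    access_key_id="<iam-access-key-id>",
    secret_access_key=SecureString(value="<iam-secret-key>"),
    # service_url is only needed for a non-default endpoint
)

gcs_location = GoogleCloudStorageLocation(
    bucket_name="my-bucket",
    folder_path="raw/events",
    file_name="events.json",
)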
+ :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GoogleCloudStorageReadSettings, self).__init__(**kwargs) + self.type = 'GoogleCloudStorageReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. 
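# A sketch for the GoogleCloudStorageReadSettings above; all values are placeholders,
# and the datetime strings pass through as-is since the fields are typed as object.
from azure.synapse.artifacts.models import GoogleCloudStorageReadSettings

gcs_read = GoogleCloudStorageReadSettings(
    recursive=True,
    wildcard_file_name="*.json",
    modified_datetime_start="2020-09-01T00:00:00Z",
    modified_datetime_end="2020-09-15T00:00:00Z",
)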
Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GreenplumLinkedService, self).__init__(**kwargs) + self.type = 'Greenplum' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class GreenplumSource(TabularSource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
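# A sketch for the GreenplumLinkedService above, keeping the password out of the
# connection string via an AzureKeyVaultSecretReference; the store/secret_name fields
# of that model and the LinkedServiceReference construction are assumptions (both
# models are defined elsewhere in this file).
from azure.synapse.artifacts.models import (
    AzureKeyVaultSecretReference,
    GreenplumLinkedService,
    LinkedServiceReference,
)

gp_ls = GreenplumLinkedService(
    connection_string="HOST=gp.example.com;PORT=5432;DB=analytics;UID=loader",
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyKeyVaultLS"),
        secret_name="gp-password",
    ),
)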
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(GreenplumSource, self).__init__(**kwargs) + self.type = 'GreenplumSource' # type: str + self.query = kwargs.get('query', None) + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'linked_service_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(GreenplumTableDataset, self).__init__(**kwargs)
+ self.type = 'GreenplumTable' # type: str
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None)
+
+
+ class HBaseLinkedService(LinkedService):
+ """HBase server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of linked service. Constant filled by server.
+ :type type: str
+ :param connect_via: The integration runtime reference.
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the linked service.
+ :type annotations: list[object]
+ :param host: Required. The IP address or host name of the HBase server. (e.g. 192.168.222.160).
+ :type host: object
+ :param port: The TCP port that the HBase instance uses to listen for client connections. The
+ default value is 9090.
+ :type port: object
+ :param http_path: The partial URL corresponding to the HBase server. (e.g.
+ /gateway/sandbox/hbase/version).
+ :type http_path: object
+ :param authentication_type: Required. The authentication mechanism to use to connect to the
+ HBase server. Possible values include: "Anonymous", "Basic".
+ :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType
+ :param username: The user name used to connect to the HBase instance.
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.synapse.artifacts.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
+ default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for
+ verifying the server when connecting over SSL. This property can only be set when using SSL on
+ self-hosted IR. The default value is the cacerts.pem file installed with the IR.
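# A sketch for the GreenplumTableDataset above, using the preferred schema/table pair
# instead of the retired table_name; the LinkedServiceReference construction is an
# assumption and all values are placeholders.
from azure.synapse.artifacts.models import GreenplumTableDataset, LinkedServiceReference

gp_ds = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(            # required
        type="LinkedServiceReference", reference_name="GreenplumLS"),
    schema_type_properties_schema="public",  # serialized as typeProperties.schema
    table="sales",
)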
+ :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HBaseLinkedService, self).__init__(**kwargs) + self.type = 'HBase' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
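# A sketch for the HBaseLinkedService above, using Basic authentication over SSL;
# values are placeholders and SecureString as the secret type is an assumption.
from azure.synapse.artifacts.models import HBaseLinkedService, SecureString

hbase_ls = HBaseLinkedService(
    host="192.168.222.160",        # required
    authentication_type="Basic",   # required; "Anonymous" or "Basic"
    port=9090,
    http_path="/gateway/sandbox/hbase/version",
    username="hbase_user",
    password=SecureString(value="<password>"),
    enable_ssl=True,
)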
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HBaseObjectDataset, self).__init__(**kwargs) + self.type = 'HBaseObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class HBaseSource(TabularSource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HBaseSource, self).__init__(**kwargs) + self.type = 'HBaseSource' # type: str + self.query = kwargs.get('query', None) + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the HDFS. Possible values + are: Anonymous and Windows. Type: string (or Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsLinkedService, self).__init__(**kwargs) + self.type = 'Hdfs' # type: str + self.url = kwargs['url'] + self.authentication_type = kwargs.get('authentication_type', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsLocation, self).__init__(**kwargs) + self.type = 'HdfsLocation' # type: str + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + string). 
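# Sketches for the HdfsLinkedService and HdfsLocation above, using Windows
# authentication; values are placeholders and SecureString is an assumption.
from azure.synapse.artifacts.models import HdfsLinkedService, HdfsLocation, SecureString

hdfs_ls = HdfsLinkedService(
    url="http://myhostname:50070/webhdfs/v1",  # required
    authentication_type="Windows",             # "Anonymous" or "Windows"
    user_name="DOMAIN\\svc_copy",
    password=SecureString(value="<password>"),
)

hdfs_location = HdfsLocation(folder_path="/data/in", file_name="part-0000.csv")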
+ :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsReadSettings, self).__init__(**kwargs) + self.type = 'HdfsReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. 
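# A sketch for the HdfsReadSettings above; DistcpSettings is left out because its
# fields are not shown in this patch, and all values are placeholders.
from azure.synapse.artifacts.models import HdfsReadSettings

hdfs_read = HdfsReadSettings(
    recursive=True,
    wildcard_folder_path="/data/in/2020/*",
    wildcard_file_name="*.csv",
    enable_partition_discovery=False,
)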
+ :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(HdfsSource, self).__init__(**kwargs) + self.type = 'HdfsSource' # type: str + self.recursive = kwargs.get('recursive', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster + is with ESP (Enterprise Security Package). 
+ :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightHiveActivity, self).__init__(**kwargs) + self.type = 'HDInsightHive' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.variables = kwargs.get('variables', None) + self.query_timeout = kwargs.get('query_timeout', None) + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with + resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to + the HCatalog database. 
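# A sketch for the HDInsightHiveActivity above; the LinkedServiceReference
# construction is an assumption, and the script path and reference names are
# placeholders.
from azure.synapse.artifacts.models import HDInsightHiveActivity, LinkedServiceReference

hive_activity = HDInsightHiveActivity(
    name="RunDailyHiveScript",                 # required
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyHDInsightLS"),
    script_path="scripts/daily.hql",
    script_linked_service=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyStorageLS"),
    defines={"run_date": "2020-09-15"},
    get_debug_info="Failure",
    query_timeout=60,  # minutes; only effective on ESP clusters
)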
+ :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. + Type: string (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightLinkedService, self).__init__(**kwargs) + self.type = 'HDInsight' # type: str + self.cluster_uri = kwargs['cluster_uri'] + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.file_system = kwargs.get('file_system', None) + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightMapReduceActivity, self).__init__(**kwargs) + self.type = 'HDInsightMapReduce' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.class_name = kwargs['class_name'] + self.jar_file_path = kwargs['jar_file_path'] + self.jar_linked_service = kwargs.get('jar_linked_service', None) + self.jar_libs = kwargs.get('jar_libs', None) + self.defines = kwargs.get('defines', None) + + +class HDInsightOnDemandLinkedService(LinkedService): + """HDInsight ondemand linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. 
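# A sketch for the HDInsightMapReduceActivity above; class_name and jar_file_path are
# the required type properties, and all names here are placeholders.
from azure.synapse.artifacts.models import HDInsightMapReduceActivity, LinkedServiceReference

mr_activity = HDInsightMapReduceActivity(
    name="RunWordCount",                  # required
    class_name="org.example.WordCount",   # required
    jar_file_path="jars/wordcount.jar",   # required
    jar_linked_service=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyStorageLS"),
    arguments=["/input/path", "/output/path"],
)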
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. + Type: string (or Expression with resultType string). + :type cluster_size: object + :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity + run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string + (or Expression with resultType string). + :type time_to_live: object + :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + resultType string). + :type version: object + :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand + cluster for storing and processing data. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: + string (or Expression with resultType string). + :type host_subscription_id: object + :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string + (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key for the service principal id. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: + string (or Expression with resultType string). + :type cluster_resource_group: object + :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with + timestamp. Type: string (or Expression with resultType string). + :type cluster_name_prefix: object + :param cluster_user_name: The username to access the cluster. Type: string (or Expression with + resultType string). + :type cluster_user_name: object + :param cluster_password: The password to access the cluster. + :type cluster_password: ~azure.synapse.artifacts.models.SecretBase + :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + Linux). Type: string (or Expression with resultType string). + :type cluster_ssh_user_name: object + :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). + :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase + :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight + linked service so that the Data Factory service can register them on your behalf. + :type additional_linked_service_names: + list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the + HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database + as the metastore. 
+ :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param cluster_type: The cluster type. Type: string (or Expression with resultType string). + :type cluster_type: object + :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or + Expression with resultType string). + :type spark_version: object + :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) + for the HDInsight cluster to be created. + :type core_configuration: object + :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for + the HDInsight cluster. + :type h_base_configuration: object + :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the + HDInsight cluster. + :type hdfs_configuration: object + :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the + HDInsight cluster. + :type hive_configuration: object + :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- + site.xml) for the HDInsight cluster. + :type map_reduce_configuration: object + :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for + the HDInsight cluster. + :type oozie_configuration: object + :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for + the HDInsight cluster. + :type storm_configuration: object + :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the + HDInsight cluster. + :type yarn_configuration: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param head_node_size: Specifies the size of the head node for the HDInsight cluster. + :type head_node_size: object + :param data_node_size: Specifies the size of the data node for the HDInsight cluster. + :type data_node_size: object + :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight + cluster. + :type zookeeper_node_size: object + :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. + Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- + cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- + us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction] + :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be + joined after creation. Type: string (or Expression with resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + specified, then this property is required. Type: string (or Expression with resultType string). 
+ :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 
'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) + self.type = 'HDInsightOnDemand' # type: str + self.cluster_size = kwargs['cluster_size'] + self.time_to_live = kwargs['time_to_live'] + self.version = kwargs['version'] + self.linked_service_name = kwargs['linked_service_name'] + self.host_subscription_id = kwargs['host_subscription_id'] + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs['tenant'] + self.cluster_resource_group = kwargs['cluster_resource_group'] + self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) + self.cluster_user_name = kwargs.get('cluster_user_name', None) + self.cluster_password = kwargs.get('cluster_password', None) + self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) + self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) + self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.cluster_type = kwargs.get('cluster_type', None) + self.spark_version = kwargs.get('spark_version', None) + self.core_configuration = kwargs.get('core_configuration', None) + self.h_base_configuration = kwargs.get('h_base_configuration', None) + self.hdfs_configuration = kwargs.get('hdfs_configuration', None) + self.hive_configuration = kwargs.get('hive_configuration', None) + self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) + self.oozie_configuration = kwargs.get('oozie_configuration', None) + self.storm_configuration = kwargs.get('storm_configuration', None) + self.yarn_configuration = kwargs.get('yarn_configuration', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.head_node_size = kwargs.get('head_node_size', None) + self.data_node_size = kwargs.get('data_node_size', None) + self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) + self.script_actions = kwargs.get('script_actions', None) + self.virtual_network_id = kwargs.get('virtual_network_id', None) + self.subnet_name = kwargs.get('subnet_name', None) + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. 
+ :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression + with resultType array). + :type arguments: object + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightPigActivity, self).__init__(**kwargs) + self.type = 'HDInsightPig' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. 
+ Type: string (or Expression with resultType string).
+ :type root_path: object
+ :param entry_file_path: Required. The relative path to the root folder of the code/package to
+ be executed. Type: string (or Expression with resultType string).
+ :type entry_file_path: object
+ :param arguments: The user-specified arguments to HDInsightSparkActivity.
+ :type arguments: list[object]
+ :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure".
+ :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption
+ :param spark_job_linked_service: The storage linked service for uploading the entry file and
+ dependencies, and for receiving logs.
+ :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param class_name: The application's Java/Spark main class.
+ :type class_name: str
+ :param proxy_user: The user to impersonate when executing the job. Type: string (or
+ Expression with resultType string).
+ :type proxy_user: object
+ :param spark_config: Spark configuration properties.
+ :type spark_config: dict[str, object]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'root_path': {'required': True},
+ 'entry_file_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'},
+ 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'},
+ 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
+ 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
+ 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'},
+ 'class_name': {'key': 'typeProperties.className', 'type': 'str'},
+ 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'},
+ 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HDInsightSparkActivity, self).__init__(**kwargs)
+ self.type = 'HDInsightSpark' # type: str
+ self.root_path = kwargs['root_path']
+ self.entry_file_path = kwargs['entry_file_path']
+ self.arguments = kwargs.get('arguments', None)
+ self.get_debug_info = kwargs.get('get_debug_info', None)
+ self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None)
+ self.class_name = kwargs.get('class_name', None)
+ self.proxy_user = kwargs.get('proxy_user', None)
+ self.spark_config = kwargs.get('spark_config', None)
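A minimal usage sketch for the model above (illustrative, not part of the patch): the activity name, paths, and linked-service name are hypothetical, and LinkedServiceReference is assumed to accept the kwargs shown.

# Illustrative only; all names and paths are placeholders.
from azure.synapse.artifacts.models import HDInsightSparkActivity, LinkedServiceReference

storage_ls = LinkedServiceReference(
    type='LinkedServiceReference',            # reference discriminator
    reference_name='MyStorageLinkedService',  # hypothetical linked service
)
spark_activity = HDInsightSparkActivity(
    name='RunSparkJob',                 # required
    root_path='adfspark',               # required: root path in spark_job_linked_service
    entry_file_path='pyFiles/main.py',  # required: relative to the root path
    spark_job_linked_service=storage_ls,
    arguments=['--date', '2020-09-15'],
    get_debug_info='Failure',
)

Because required values are read with kwargs['...'] in __init__, omitting root_path or entry_file_path fails immediately with a KeyError rather than at send time.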
+
+
+class HDInsightStreamingActivity(ExecutionActivity):
+ """HDInsight streaming activity type.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param type: Required. Type of activity. Constant filled by server.
+ :type type: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy
+ :param storage_linked_services: Storage linked service references.
+ :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference]
+ :param arguments: User-specified arguments to HDInsightActivity.
+ :type arguments: list[object]
+ :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure".
+ :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption
+ :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType
+ string).
+ :type mapper: object
+ :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType
+ string).
+ :type reducer: object
+ :param input: Required. Input blob path. Type: string (or Expression with resultType string).
+ :type input: object
+ :param output: Required. Output blob path. Type: string (or Expression with resultType string).
+ :type output: object
+ :param file_paths: Required. Paths to streaming job files. Can be directories.
+ :type file_paths: list[object]
+ :param file_linked_service: Linked service reference where the files are located.
+ :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param combiner: Combiner executable name. Type: string (or Expression with resultType string).
+ :type combiner: object
+ :param command_environment: Command line environment values.
+ :type command_environment: list[object]
+ :param defines: Allows the user to specify defines for the streaming job request.
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(HDInsightStreamingActivity, self).__init__(**kwargs) + self.type = 'HDInsightStreaming' # type: str + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.mapper = kwargs['mapper'] + self.reducer = kwargs['reducer'] + self.input = kwargs['input'] + self.output = kwargs['output'] + self.file_paths = kwargs['file_paths'] + self.file_linked_service = kwargs.get('file_linked_service', None) + self.combiner = kwargs.get('combiner', None) + self.command_environment = kwargs.get('command_environment', None) + self.defines = kwargs.get('defines', None) + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. IP address or host name of the Hive server, separated by ';' for + multiple hosts (only when serviceDiscoveryMode is enable). 
+ :type host: object
+ :param port: The TCP port that the Hive server uses to listen for client connections.
+ :type port: object
+ :param server_type: The type of Hive server. Possible values include: "HiveServer1",
+ "HiveServer2", "HiveThriftServer".
+ :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType
+ :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible
+ values include: "Binary", "SASL", "HTTP ".
+ :type thrift_transport_protocol: str or
+ ~azure.synapse.artifacts.models.HiveThriftTransportProtocol
+ :param authentication_type: Required. The authentication method used to access the Hive server.
+ Possible values include: "Anonymous", "Username", "UsernameAndPassword",
+ "WindowsAzureHDInsightService".
+ :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType
+ :param service_discovery_mode: True to indicate using the ZooKeeper service, false otherwise.
+ :type service_discovery_mode: object
+ :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are
+ added.
+ :type zoo_keeper_name_space: object
+ :param use_native_query: Specifies whether the driver uses native HiveQL queries, or converts
+ them into an equivalent form in HiveQL.
+ :type use_native_query: object
+ :param username: The user name that you use to access Hive Server.
+ :type username: object
+ :param password: The password corresponding to the user name that you provided in the Username
+ field.
+ :type password: ~azure.synapse.artifacts.models.SecretBase
+ :param http_path: The partial URL corresponding to the Hive server.
+ :type http_path: object
+ :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
+ default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for
+ verifying the server when connecting over SSL. This property can only be set when using SSL on
+ self-hosted IR. The default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust
+ store or from a specified PEM file. The default value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate
+ name to match the host name of the server when connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from
+ the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+ encrypted using the integration runtime credential manager. Type: string (or Expression with
+ resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ 'authentication_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'},
+ 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'},
+ 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'},
+ 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'},
+ 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+ 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
+ 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
+ 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
+ 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HiveLinkedService, self).__init__(**kwargs)
+ self.type = 'Hive' # type: str
+ self.host = kwargs['host']
+ self.port = kwargs.get('port', None)
+ self.server_type = kwargs.get('server_type', None)
+ self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None)
+ self.authentication_type = kwargs['authentication_type']
+ self.service_discovery_mode = kwargs.get('service_discovery_mode', None)
+ self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None)
+ self.use_native_query = kwargs.get('use_native_query', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.http_path = kwargs.get('http_path', None)
+ self.enable_ssl = kwargs.get('enable_ssl', None)
+ self.trusted_cert_path = kwargs.get('trusted_cert_path', None)
+ self.use_system_trust_store = kwargs.get('use_system_trust_store', None)
+ self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None)
+ self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
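A minimal construction sketch for the Hive linked service above (illustrative, not part of the patch); the host and credentials are invented, and SecureString (a SecretBase subclass in this models package) is assumed for the password.

# Illustrative only; endpoint and credentials are placeholders.
from azure.synapse.artifacts.models import HiveLinkedService, SecureString

hive_ls = HiveLinkedService(
    host='hive.contoso.com',                    # required
    authentication_type='UsernameAndPassword',  # required; see HiveAuthenticationType
    port=10000,
    username='hive-reader',
    password=SecureString(value='<secret>'),    # any SecretBase subclass works here
    enable_ssl=True,
)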
+
+
+class HiveObjectDataset(Dataset):
+ """Hive Server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset. Constant filled by server.
+ :type type: str
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+ with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+ Expression with resultType array), itemType: DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+ root level.
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+ :param table_name: This property will be retired. Please consider using schema + table
+ properties instead.
+ :type table_name: object
+ :param table: The Hive table name. Type: string (or Expression with resultType string).
+ :type table: object
+ :param schema_type_properties_schema: The Hive schema name. Type: string (or Expression
+ with resultType string).
+ :type schema_type_properties_schema: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'linked_service_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HiveObjectDataset, self).__init__(**kwargs)
+ self.type = 'HiveObject' # type: str
+ self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None)
+
+
+class HiveSource(TabularSource):
+ """A copy activity Hive Server source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy source type. Constant filled by server.
+ :type type: str
+ :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+ integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the source data
+ store.
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HiveSource, self).__init__(**kwargs) + self.type = 'HiveSource' # type: str + self.query = kwargs.get('query', None) + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + string (or Expression with resultType string). + :type url: object + :param authentication_type: The authentication type to be used to connect to the HTTP server. + Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate with + EmbeddedCertData authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate + authentication. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only + valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for authentication. 
Credentials are
+ encrypted using the integration runtime credential manager. Type: string (or Expression with
+ resultType string).
+ :type encrypted_credential: object
+ :param enable_server_certificate_validation: If true, validate the HTTPS server SSL
+ certificate. Default value is true. Type: boolean (or Expression with resultType boolean).
+ :type enable_server_certificate_validation: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'url': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'url': {'key': 'typeProperties.url', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'},
+ 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HttpLinkedService, self).__init__(**kwargs)
+ self.type = 'HttpServer' # type: str
+ self.url = kwargs['url']
+ self.authentication_type = kwargs.get('authentication_type', None)
+ self.user_name = kwargs.get('user_name', None)
+ self.password = kwargs.get('password', None)
+ self.embedded_cert_data = kwargs.get('embedded_cert_data', None)
+ self.cert_thumbprint = kwargs.get('cert_thumbprint', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None)
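A quick sketch of the HTTP linked service above with Basic authentication (illustrative, not part of the patch); the URL and account values are placeholders, and SecureString is assumed as the SecretBase subclass.

# Illustrative only; URL and credentials are placeholders.
from azure.synapse.artifacts.models import HttpLinkedService, SecureString

http_ls = HttpLinkedService(
    url='https://example.com/feeds/daily.csv',  # required: base URL of the endpoint
    authentication_type='Basic',
    user_name='svc-reader',
    password=SecureString(value='<secret>'),
    enable_server_certificate_validation=True,
)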
+
+
+class HttpReadSettings(StoreReadSettings):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type. Constant filled by server.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count for the source data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
+ string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type:
+ string (or Expression with resultType string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type:
+ string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get an HTTP response from
+ the HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HttpReadSettings, self).__init__(**kwargs)
+ self.type = 'HttpReadSettings' # type: str
+ self.request_method = kwargs.get('request_method', None)
+ self.request_body = kwargs.get('request_body', None)
+ self.additional_headers = kwargs.get('additional_headers', None)
+ self.request_timeout = kwargs.get('request_timeout', None)
+
+
+class HttpServerLocation(DatasetLocation):
+ """The location of an HTTP server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location. Constant filled by server.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or Expression with
+ resultType string).
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType
+ string).
+ :type file_name: object
+ :param relative_url: Specify the relativeUrl of the HTTP server. Type: string (or Expression
+ with resultType string).
+ :type relative_url: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ 'relative_url': {'key': 'relativeUrl', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HttpServerLocation, self).__init__(**kwargs)
+ self.type = 'HttpServerLocation' # type: str
+ self.relative_url = kwargs.get('relative_url', None)
+
+
+class HttpSource(CopySource):
+ """A copy activity source for an HTTP file.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy source type. Constant filled by server.
+ :type type: str
+ :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+ integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the source data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param http_request_timeout: Specifies the timeout for an HTTP client to get an HTTP response
+ from the HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout.
Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HttpSource, self).__init__(**kwargs) + self.type = 'HttpSource' # type: str + self.http_request_timeout = kwargs.get('http_request_timeout', None) + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_id: Required. The client ID associated with your Hubspot application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot application. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param access_token: The access token obtained when initially authenticating your OAuth + integration. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param refresh_token: The refresh token obtained when initially authenticating your OAuth + integration. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HubspotLinkedService, self).__init__(**kwargs) + self.type = 'Hubspot' # type: str + self.client_id = kwargs['client_id'] + self.client_secret = kwargs.get('client_secret', None) + self.access_token = kwargs.get('access_token', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HubspotObjectDataset, self).__init__(**kwargs) + self.type = 'HubspotObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class HubspotSource(TabularSource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(HubspotSource, self).__init__(**kwargs) + self.type = 'HubspotSource' # type: str + self.query = kwargs.get('query', None) + + +class IfConditionActivity(Activity): + """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. 
+ :type type: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+ :param expression: Required. An expression that would evaluate to Boolean. This is used to
+ determine the block of activities (ifTrueActivities or ifFalseActivities) that will be
+ executed.
+ :type expression: ~azure.synapse.artifacts.models.Expression
+ :param if_true_activities: List of activities to execute if expression is evaluated to true.
+ This is an optional property and if not provided, the activity will exit without any action.
+ :type if_true_activities: list[~azure.synapse.artifacts.models.Activity]
+ :param if_false_activities: List of activities to execute if expression is evaluated to false.
+ This is an optional property and if not provided, the activity will exit without any action.
+ :type if_false_activities: list[~azure.synapse.artifacts.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'expression': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+ 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'},
+ 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IfConditionActivity, self).__init__(**kwargs)
+ self.type = 'IfCondition' # type: str
+ self.expression = kwargs['expression']
+ self.if_true_activities = kwargs.get('if_true_activities', None)
+ self.if_false_activities = kwargs.get('if_false_activities', None)
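A sketch of the control-flow model above (illustrative, not part of the patch); the expression text is hypothetical, and the Expression and WaitActivity models, with the kwargs shown, are assumed from elsewhere in this package.

# Illustrative only; expression and branch activities are placeholders.
from azure.synapse.artifacts.models import Expression, IfConditionActivity, WaitActivity

check = IfConditionActivity(
    name='CheckRowCount',   # required
    expression=Expression(  # required; must evaluate to Boolean
        type='Expression',
        value="@greater(activity('LookupRows').output.count, 0)",
    ),
    if_true_activities=[WaitActivity(name='Noop', wait_time_in_seconds=1)],
    # if_false_activities omitted: the activity exits without any action.
)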
+    :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType
+    :param username: The user name used to access the Impala server. The default value is anonymous
+     when using SASLUsername.
+    :type username: object
+    :param password: The password corresponding to the user name when using UsernameAndPassword.
+    :type password: ~azure.synapse.artifacts.models.SecretBase
+    :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
+     default value is false.
+    :type enable_ssl: object
+    :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for
+     verifying the server when connecting over SSL. This property can only be set when using SSL on
+     self-hosted IR. The default value is the cacerts.pem file installed with the IR.
+    :type trusted_cert_path: object
+    :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust
+     store or from a specified PEM file. The default value is false.
+    :type use_system_trust_store: object
+    :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate
+     name to match the host name of the server when connecting over SSL. The default value is false.
+    :type allow_host_name_cn_mismatch: object
+    :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from
+     the server. The default value is false.
+    :type allow_self_signed_server_cert: object
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+        'authentication_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
+        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
+        'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
+        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ImpalaLinkedService, self).__init__(**kwargs)
+        self.type = 'Impala'  # type: str
+        self.host = kwargs['host']
+        self.port = kwargs.get('port', None)
+        self.authentication_type = kwargs['authentication_type']
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.enable_ssl = kwargs.get('enable_ssl', None)
+        self.trusted_cert_path = kwargs.get('trusted_cert_path', None)
+        self.use_system_trust_store = kwargs.get('use_system_trust_store', None)
+        self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None)
+        self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class ImpalaObjectDataset(Dataset):
+    """Impala server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset.Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+     with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+     Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+     root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param table_name: This property will be retired. Please consider using schema + table
+     properties instead.
+    :type table_name: object
+    :param table: The table name of the Impala server. Type: string (or Expression with resultType
+     string).
+    :type table: object
+    :param schema_type_properties_schema: The schema name of the Impala server. Type: string (or
+     Expression with resultType string).
+    :type schema_type_properties_schema: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'table': {'key': 'typeProperties.table', 'type': 'object'},
+        'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ImpalaObjectDataset, self).__init__(**kwargs)
+        self.type = 'ImpalaObject'  # type: str
+        self.table_name = kwargs.get('table_name', None)
+        self.table = kwargs.get('table', None)
+        self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None)
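As a rough usage sketch for the two Impala models above (assuming the azure.synapse.artifacts.models namespace used throughout this file; the host, credentials, table, and reference names are illustrative placeholders, not a tested sample):

    from azure.synapse.artifacts.models import (
        ImpalaLinkedService,
        ImpalaObjectDataset,
        LinkedServiceReference,
    )

    # Required kwargs ('host', 'authentication_type') are read with kwargs[...],
    # so omitting them raises KeyError at construction time.
    linked_service = ImpalaLinkedService(
        host='192.168.222.160',
        port=21050,
        authentication_type='UsernameAndPassword',
        username='analyst',
    )

    # Optional properties such as 'table' fall back to None via kwargs.get(...).
    dataset = ImpalaObjectDataset(
        linked_service_name=LinkedServiceReference(
            type='LinkedServiceReference',
            reference_name='ImpalaLinkedService1',
        ),
        table='sales',
    )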
+
+
+class ImpalaSource(TabularSource):
+    """A copy activity Impala server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type.Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: A query to retrieve data from source. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(ImpalaSource, self).__init__(**kwargs)
+        self.type = 'ImpalaSource'  # type: str
+        self.query = kwargs.get('query', None)
+
+
+class InformixLinkedService(LinkedService):
+    """Informix linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service.Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param connection_string: Required. The non-access credential portion of the connection string
+     as well as an optional encrypted credential. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param authentication_type: Type of authentication used to connect to Informix as an ODBC data
+     store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType
+     string).
+    :type authentication_type: object
+    :param credential: The access credential portion of the connection string specified in driver-
+     specific property-value format.
+    :type credential: ~azure.synapse.artifacts.models.SecretBase
+    :param user_name: User name for Basic authentication. Type: string (or Expression with
+     resultType string).
+ :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(InformixLinkedService, self).__init__(**kwargs) + self.type = 'Informix' # type: str + self.connection_string = kwargs['connection_string'] + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(InformixSink, self).__init__(**kwargs) + self.type = 'InformixSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + + +class InformixSource(TabularSource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(InformixSource, self).__init__(**kwargs) + self.type = 'InformixSource' # type: str + self.query = kwargs.get('query', None) + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The Informix table name. Type: string (or Expression with resultType + string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(InformixTableDataset, self).__init__(**kwargs) + self.type = 'InformixTable' # type: str + self.table_name = kwargs.get('table_name', None) + + +class IntegrationRuntime(msrest.serialization.Model): + """Azure Synapse nested object which serves as a compute resource for activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'} + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'IntegrationRuntime' # type: str + self.description = kwargs.get('description', None) + + +class IntegrationRuntimeComputeProperties(msrest.serialization.Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+    :type additional_properties: dict[str, object]
+    :param location: The location of the managed integration runtime. The supported regions can be
+     found at https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-
+     activities.
+    :type location: str
+    :param node_size: The node size requirement for the managed integration runtime.
+    :type node_size: str
+    :param number_of_nodes: The required number of nodes for the managed integration runtime.
+    :type number_of_nodes: int
+    :param max_parallel_executions_per_node: The maximum number of parallel executions per node
+     for the managed integration runtime.
+    :type max_parallel_executions_per_node: int
+    :param data_flow_properties: Data flow properties for managed integration runtime.
+    :type data_flow_properties:
+     ~azure.synapse.artifacts.models.IntegrationRuntimeDataFlowProperties
+    :param v_net_properties: VNet properties for managed integration runtime.
+    :type v_net_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeVNetProperties
+    """
+
+    _validation = {
+        'number_of_nodes': {'minimum': 1},
+        'max_parallel_executions_per_node': {'minimum': 1},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'node_size': {'key': 'nodeSize', 'type': 'str'},
+        'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
+        'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'},
+        'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.location = kwargs.get('location', None)
+        self.node_size = kwargs.get('node_size', None)
+        self.number_of_nodes = kwargs.get('number_of_nodes', None)
+        self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None)
+        self.data_flow_properties = kwargs.get('data_flow_properties', None)
+        self.v_net_properties = kwargs.get('v_net_properties', None)
+
+
+class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model):
+    """Custom setup script properties for a managed dedicated integration runtime.
+
+    :param blob_container_uri: The URI of the Azure blob container that contains the custom setup
+     script.
+    :type blob_container_uri: str
+    :param sas_token: The SAS token of the Azure blob container.
+    :type sas_token: ~azure.synapse.artifacts.models.SecureString
+    """
+
+    _attribute_map = {
+        'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'},
+        'sas_token': {'key': 'sasToken', 'type': 'SecureString'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs)
+        self.blob_container_uri = kwargs.get('blob_container_uri', None)
+        self.sas_token = kwargs.get('sas_token', None)
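A minimal construction sketch for the compute and custom-setup models above (all values are placeholders; SecureString is the secret model referenced elsewhere in this file):

    from azure.synapse.artifacts.models import (
        IntegrationRuntimeComputeProperties,
        IntegrationRuntimeCustomSetupScriptProperties,
        SecureString,
    )

    # _validation declares 'minimum': 1 for both node-count fields; the
    # service rejects values below those bounds.
    compute = IntegrationRuntimeComputeProperties(
        location='AutoResolve',
        node_size='Standard_D8_v3',
        number_of_nodes=1,
        max_parallel_executions_per_node=4,
    )

    setup_script = IntegrationRuntimeCustomSetupScriptProperties(
        blob_container_uri='https://example.blob.core.windows.net/setup',
        sas_token=SecureString(value='<placeholder-sas-token>'),
    )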
+
+
+class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model):
+    """Data flow properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param compute_type: Compute type of the cluster which will execute the data flow job.
+     Possible values include: "General", "MemoryOptimized", "ComputeOptimized".
+    :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType
+    :param core_count: Core count of the cluster which will execute the data flow job. Supported
+     values are: 8, 16, 32, 48, 80, 144 and 272.
+    :type core_count: int
+    :param time_to_live: Time to live (in minutes) setting of the cluster which will execute the
+     data flow job.
+    :type time_to_live: int
+    """
+
+    _validation = {
+        'time_to_live': {'minimum': 0},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'time_to_live': {'key': 'timeToLive', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.compute_type = kwargs.get('compute_type', None)
+        self.core_count = kwargs.get('core_count', None)
+        self.time_to_live = kwargs.get('time_to_live', None)
+
+
+class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model):
+    """Data proxy properties for a managed dedicated integration runtime.
+
+    :param connect_via: The self-hosted integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.EntityReference
+    :param staging_linked_service: The staging linked service reference.
+    :type staging_linked_service: ~azure.synapse.artifacts.models.EntityReference
+    :param path: The path to contain the staged data in the Blob storage.
+    :type path: str
+    """
+
+    _attribute_map = {
+        'connect_via': {'key': 'connectVia', 'type': 'EntityReference'},
+        'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'},
+        'path': {'key': 'path', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs)
+        self.connect_via = kwargs.get('connect_via', None)
+        self.staging_linked_service = kwargs.get('staging_linked_service', None)
+        self.path = kwargs.get('path', None)
+
+
+class IntegrationRuntimeListResponse(msrest.serialization.Model):
+    """A list of integration runtime resources.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of integration runtimes.
+    :type value: list[~azure.synapse.artifacts.models.IntegrationRuntimeResource]
+    :param next_link: The link to the next page of results, if any remaining results exist.
+    :type next_link: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeListResponse, self).__init__(**kwargs)
+        self.value = kwargs['value']
+        self.next_link = kwargs.get('next_link', None)
+
+
+class IntegrationRuntimeReference(msrest.serialization.Model):
+    """Integration runtime reference type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Type of integration runtime. Possible values include:
+     "IntegrationRuntimeReference".
+    :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType
+    :param reference_name: Required. Reference integration runtime name.
+    :type reference_name: str
+    :param parameters: Arguments for integration runtime.
+ :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class IntegrationRuntimeResource(AzureEntityResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.synapse.artifacts.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user account of the catalog + database. + :type catalog_admin_password: ~azure.synapse.artifacts.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values + include: "Basic", "Standard", "Premium", "PremiumRS". 
+ :type catalog_pricing_tier: str or + ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) + self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) + self.catalog_admin_password = kwargs.get('catalog_admin_password', None) + self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + + +class IntegrationRuntimeSsisProperties(msrest.serialization.Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration runtime. + :type catalog_info: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :type license_type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :type custom_setup_script_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :type data_proxy_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". + :type edition: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEdition + :param express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. 
+ :type express_custom_setup_properties: list[~azure.synapse.artifacts.models.CustomSetupBase] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, + 'edition': {'key': 'edition', 'type': 'str'}, + 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_info = kwargs.get('catalog_info', None) + self.license_type = kwargs.get('license_type', None) + self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) + self.edition = kwargs.get('edition', None) + self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) + + +class IntegrationRuntimeVNetProperties(msrest.serialization.Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will + use. + :type public_i_ps: list[str] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.v_net_id = kwargs.get('v_net_id', None) + self.subnet = kwargs.get('subnet', None) + self.public_i_ps = kwargs.get('public_i_ps', None) + + +class JiraLinkedService(LinkedService): + """Jira Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the Jira service. (e.g. + jira.example.com). 
+ :type host: object + :param port: The TCP port that the Jira server uses to listen for client connections. The + default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira Service. + :type username: object + :param password: The password corresponding to the user name that you provided in the username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JiraLinkedService, self).__init__(**kwargs) + self.type = 'Jira' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.username = kwargs['username'] + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class JiraObjectDataset(Dataset): + """Jira Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JiraObjectDataset, self).__init__(**kwargs) + self.type = 'JiraObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class JiraSource(TabularSource): + """A copy activity Jira Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JiraSource, self).__init__(**kwargs) + self.type = 'JiraSource' # type: str + self.query = kwargs.get('query', None) + + +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the json data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonDataset, self).__init__(**kwargs) + self.type = 'Json' # type: str + self.location = kwargs.get('location', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression = kwargs.get('compression', None) + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + :param file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". + :type file_pattern: str or ~azure.synapse.artifacts.models.JsonFormatFilePattern + :param nesting_separator: The character used to separate nesting levels. Default value is '.' + (dot). Type: string (or Expression with resultType string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not provided, the + default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. + The full list of supported values can be found in the 'Name' column of the table of encodings + in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). + :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column mapping with a customized + column name to extract data from JSON file. For fields under root object, start with "$"; for + fields inside the array chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or + Expression with resultType object). 
+ :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonFormat, self).__init__(**kwargs) + self.type = 'JsonFormat' # type: str + self.file_pattern = kwargs.get('file_pattern', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.json_node_reference = kwargs.get('json_node_reference', None) + self.json_path_definition = kwargs.get('json_path_definition', None) + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Json format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonSink, self).__init__(**kwargs) + self.type = 'JsonSink' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonSource, self).__init__(**kwargs) + self.type = 'JsonSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". 
+    :type file_pattern: str or ~azure.synapse.artifacts.models.JsonWriteFilePattern
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'file_pattern': {'key': 'filePattern', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(JsonWriteSettings, self).__init__(**kwargs)
+        self.type = 'JsonWriteSettings'  # type: str
+        self.file_pattern = kwargs.get('file_pattern', None)
+
+
+class LibraryRequirements(msrest.serialization.Model):
+    """Library requirements for a Big Data pool powered by Apache Spark.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar time: The last update time of the library requirements file.
+    :vartype time: ~datetime.datetime
+    :param content: The library requirements.
+    :type content: str
+    :param filename: The filename of the library requirements file.
+    :type filename: str
+    """
+
+    _validation = {
+        'time': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'time': {'key': 'time', 'type': 'iso-8601'},
+        'content': {'key': 'content', 'type': 'str'},
+        'filename': {'key': 'filename', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LibraryRequirements, self).__init__(**kwargs)
+        self.time = None
+        self.content = kwargs.get('content', None)
+        self.filename = kwargs.get('filename', None)
+
+
+class LinkedIntegrationRuntimeType(msrest.serialization.Model):
+    """The base definition of a linked integration runtime.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. The authorization type for integration runtime
+     sharing.Constant filled by server.
+    :type authorization_type: str
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedIntegrationRuntimeType, self).__init__(**kwargs)
+        self.authorization_type = None  # type: Optional[str]
+
+
+class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
+    """The key authorization type integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. The authorization type for integration runtime
+     sharing.Constant filled by server.
+    :type authorization_type: str
+    :param key: Required. The key used for authorization.
+    :type key: ~azure.synapse.artifacts.models.SecureString
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+        'key': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+        'key': {'key': 'key', 'type': 'SecureString'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs)
+        self.authorization_type = 'Key'  # type: str
+        self.key = kwargs['key']
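Because LinkedIntegrationRuntimeType carries a _subtype_map, deserializing the base type yields the concrete subclass selected by the authorizationType discriminator. A sketch of that behavior with the msrest Deserializer the generated clients wire up (the payload, including the RBAC subclass defined next, is illustrative):

    from msrest.serialization import Deserializer

    import azure.synapse.artifacts.models as models

    # Build the model registry the same way the generated clients do.
    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    deserialize = Deserializer(client_models)

    payload = {'authorizationType': 'RBAC', 'resourceId': 'example-resource-id'}
    auth = deserialize('LinkedIntegrationRuntimeType', payload)
    # auth is a LinkedIntegrationRuntimeRbacAuthorization instance with
    # resource_id == 'example-resource-id'.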
+ + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration runtime to be shared. + :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.authorization_type = 'RBAC' # type: str + self.resource_id = kwargs['resource_id'] + + +class LinkedServiceDebugResource(SubResourceDebugResource): + """Linked service debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceDebugResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class LinkedServiceListResponse(msrest.serialization.Model): + """A list of linked service resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of linked services. + :type value: list[~azure.synapse.artifacts.models.LinkedServiceResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class LinkedServiceReference(msrest.serialization.Model): + """Linked service reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Linked service reference type. Possible values include: + "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.Type + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class LinkedServiceResource(AzureEntityResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when sending a request. 
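The ``LinkedServiceReference`` model above is what other artifacts (activities, datasets) accept when they point at an existing linked service rather than embedding a full definition; a short sketch, with the reference name illustrative:

    from azure.synapse.artifacts.models import LinkedServiceReference

    # 'type' is required and 'LinkedServiceReference' is its only
    # documented value; 'reference_name' names a linked service that
    # already exists in the workspace.
    ls_ref = LinkedServiceReference(
        type='LinkedServiceReference',
        reference_name='MyStorageLinkedService',
    )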
+ + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class LogStorageSettings(msrest.serialization.Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs['linked_service_name'] + self.path = kwargs.get('path', None) + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy activity source. + :type source: ~azure.synapse.artifacts.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default value is true. Type: + boolean (or Expression with resultType boolean). + :type first_row_only: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LookupActivity, self).__init__(**kwargs) + self.type = 'Lookup' # type: str + self.source = kwargs['source'] + self.dataset = kwargs['dataset'] + self.first_row_only = kwargs.get('first_row_only', None) + + +class MagentoLinkedService(LinkedService): + """Magento server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). + :type host: object + :param access_token: The access token from Magento. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. 
+ :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MagentoLinkedService, self).__init__(**kwargs) + self.type = 'Magento' # type: str + self.host = kwargs['host'] + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MagentoObjectDataset, self).__init__(**kwargs) + self.type = 'MagentoObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MagentoSource(TabularSource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MagentoSource, self).__init__(**kwargs) + self.type = 'MagentoSource' # type: str + self.query = kwargs.get('query', None) + + +class ManagedIdentity(msrest.serialization.Model): + """The workspace managed identity. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the workspace managed identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the workspace managed identity. + :vartype tenant_id: str + :param type: The type of managed identity for the workspace. Possible values include: "None", + "SystemAssigned". 
+ :type type: str or ~azure.synapse.artifacts.models.ResourceIdentityType + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = kwargs.get('type', None) + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. + Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", + "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". + :vartype state: str or ~azure.synapse.artifacts.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration runtime. + :type compute_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. + :type ssis_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisProperties + """ + + _validation = { + 'type': {'required': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedIntegrationRuntime, self).__init__(**kwargs) + self.type = 'Managed' # type: str + self.state = None + self.compute_properties = kwargs.get('compute_properties', None) + self.ssis_properties = kwargs.get('ssis_properties', None) + + +class MappingDataFlow(DataFlow): + """Mapping data flow. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of data flow.Constant filled by server. + :type type: str + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data flow. + :type annotations: list[object] + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. + :type folder: ~azure.synapse.artifacts.models.DataFlowFolder + :param sources: List of sources in data flow. 
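A point the ``readonly`` markers above make concrete: server-populated fields such as ``ManagedIdentity.principal_id`` start as ``None`` and are never sent in a request, so only the writable fields are accepted as keyword arguments. A small sketch:

    from azure.synapse.artifacts.models import ManagedIdentity

    identity = ManagedIdentity(type='SystemAssigned')
    # principal_id / tenant_id are readonly: they remain None until a
    # server response populates them and are ignored on serialization.
    assert identity.principal_id is None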
+ :type sources: list[~azure.synapse.artifacts.models.DataFlowSource] + :param sinks: List of sinks in data flow. + :type sinks: list[~azure.synapse.artifacts.models.DataFlowSink] + :param transformations: List of transformations in data flow. + :type transformations: list[~azure.synapse.artifacts.models.Transformation] + :param script: DataFlow script. + :type script: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, + 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, + 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, + 'script': {'key': 'typeProperties.script', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(MappingDataFlow, self).__init__(**kwargs) + self.type = 'MappingDataFlow' # type: str + self.sources = kwargs.get('sources', None) + self.sinks = kwargs.get('sinks', None) + self.transformations = kwargs.get('transformations', None) + self.script = kwargs.get('script', None) + + +class MariaDBLinkedService(LinkedService): + """MariaDB server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MariaDBLinkedService, self).__init__(**kwargs) + self.type = 'MariaDB' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MariaDBSource(TabularSource): + """A copy activity MariaDB server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MariaDBSource, self).__init__(**kwargs) + self.type = 'MariaDBSource' # type: str + self.query = kwargs.get('query', None) + + +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MariaDBTableDataset, self).__init__(**kwargs) + self.type = 'MariaDBTable' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MarketoLinkedService(LinkedService): + """Marketo server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). + :type endpoint: object + :param client_id: Required. The client Id of your Marketo service. + :type client_id: object + :param client_secret: The client secret of your Marketo service. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. 
+ :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MarketoLinkedService, self).__init__(**kwargs) + self.type = 'Marketo' # type: str + self.endpoint = kwargs['endpoint'] + self.client_id = kwargs['client_id'] + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MarketoObjectDataset, self).__init__(**kwargs) + self.type = 'MarketoObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MarketoSource(TabularSource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MarketoSource, self).__init__(**kwargs) + self.type = 'MarketoSource' # type: str + self.query = kwargs.get('query', None) + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. 
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the Microsoft Access as + ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~azure.synapse.artifacts.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.type = 'MicrosoftAccess' # type: str + self.connection_string = kwargs['connection_string'] + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.type = 'MicrosoftAccessSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). 
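Copy sources such as this one also feed the ``LookupActivity`` defined earlier; a hedged sketch, with ``DatasetReference`` assumed from the same models package and all names illustrative:

    from azure.synapse.artifacts.models import (
        DatasetReference,
        LookupActivity,
        MicrosoftAccessSource,
    )

    lookup = LookupActivity(
        name='LookupAccessRow',
        source=MicrosoftAccessSource(query='SELECT TOP 1 * FROM Orders'),
        dataset=DatasetReference(
            type='DatasetReference',
            reference_name='AccessOrdersDataset',
        ),
        first_row_only=True,  # the default; shown for clarity
    )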
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.type = 'MicrosoftAccessSource' # type: str + self.query = kwargs.get('query', None) + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param collection_name: Required. The table name of the MongoDB database. Type: string (or + Expression with resultType string). + :type collection_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbCollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbCollection' # type: str + self.collection_name = kwargs['collection_name'] + + +class MongoDbCursorMethodsProperties(msrest.serialization.Model): + """Cursor methods for MongoDB query. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match the query filter. To + return all fields in the matching documents, omit this parameter. Type: string (or Expression + with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching documents. Type: string + (or Expression with resultType string). + :type sort: object + :param skip: Specifies how many documents to skip and where MongoDB begins returning + results. This approach may be useful in implementing paginated results. Type: integer (or + Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server returns. limit() is + analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with + resultType integer).
+ :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.project = kwargs.get('project', None) + self.sort = kwargs.get('sort', None) + self.skip = kwargs.get('skip', None) + self.limit = kwargs.get('limit', None) + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param server: Required. The IP address or server name of the MongoDB server. Type: string (or + Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect to the MongoDB + database. Possible values include: "Basic", "Anonymous". + :type authentication_type: str or ~azure.synapse.artifacts.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression with resultType + string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param auth_source: Database to verify the username and password. Type: string (or Expression + with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen for client connections. + The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. Type: boolean (or Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbLinkedService, self).__init__(**kwargs) + self.type = 'MongoDb' # type: str + self.server = kwargs['server'] + self.authentication_type = kwargs.get('authentication_type', None) + self.database_name = kwargs['database_name'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.auth_source = kwargs.get('auth_source', None) + self.port = kwargs.get('port', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression + with resultType string). 
+
+
+class MongoDbSource(CopySource):
+    """A copy activity source for a MongoDB database.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type. Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression
+     with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MongoDbSource, self).__init__(**kwargs)
+        self.type = 'MongoDbSource'  # type: str
+        self.query = kwargs.get('query', None)
+
+
+class MongoDbV2CollectionDataset(Dataset):
+    """The MongoDB database dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset. Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+     with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+     Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+     root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param collection: Required. The collection name of the MongoDB database. Type: string (or
+     Expression with resultType string).
+    :type collection: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'linked_service_name': {'required': True},
+        'collection': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MongoDbV2CollectionDataset, self).__init__(**kwargs)
+        self.type = 'MongoDbV2Collection'  # type: str
+        self.collection = kwargs['collection']
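A matching dataset sketch for `MongoDbV2CollectionDataset` (hypothetical reference names; `LinkedServiceReference` is assumed to take the usual `type`/`reference_name` pair from the same models module):

```python
from azure.synapse.artifacts import models

orders_ds = models.MongoDbV2CollectionDataset(
    linked_service_name=models.LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="MongoDbV2Ls",  # hypothetical linked service name
    ),
    collection="orders",  # required; read via kwargs['collection']
)
```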
+
+
+class MongoDbV2LinkedService(LinkedService):
+    """Linked service for MongoDB data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param connection_string: Required. The MongoDB connection string. Type: string, SecureString
+     or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param database: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string).
-    :type cluster_resource_group: object
-    :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with
-     timestamp. Type: string (or Expression with resultType string).
-    :type cluster_name_prefix: object
-    :param cluster_user_name: The username to access the cluster. Type: string (or Expression with
+    :type database: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MongoDbV2LinkedService, self).__init__(**kwargs)
+        self.type = 'MongoDbV2'  # type: str
+        self.connection_string = kwargs['connection_string']
+        self.database = kwargs['database']
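A construction sketch for `MongoDbV2LinkedService` (hypothetical connection string; both required properties are plain keyword arguments):

```python
from azure.synapse.artifacts import models

mongo_v2_ls = models.MongoDbV2LinkedService(
    connection_string="mongodb://mongo01.contoso.com:27017",
    database="salesdb",
)
```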
+
+
+class MongoDbV2Source(CopySource):
+    """A copy activity source for a MongoDB database.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type. Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param filter: Specifies selection filter using query operators. To return all documents in a
+     collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
+     with resultType string).
+    :type filter: object
+    :param cursor_methods: Cursor methods for MongoDB query.
+    :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties
+    :param batch_size: Specifies the number of documents to return in each batch of the response
+     from the MongoDB instance. In most cases, modifying the batch size will not affect the user or
+     the application. This property's main purpose is to avoid hitting the response size limit.
+     Type: integer (or Expression with resultType integer).
+    :type batch_size: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'filter': {'key': 'filter', 'type': 'object'},
+        'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+        'batch_size': {'key': 'batchSize', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MongoDbV2Source, self).__init__(**kwargs)
+        self.type = 'MongoDbV2Source'  # type: str
+        self.filter = kwargs.get('filter', None)
+        self.cursor_methods = kwargs.get('cursor_methods', None)
+        self.batch_size = kwargs.get('batch_size', None)
+        self.query_timeout = kwargs.get('query_timeout', None)
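A sketch combining the source with the cursor-methods model defined earlier (hypothetical filter; timeouts use the d.hh:mm:ss pattern from the docstring):

```python
from azure.synapse.artifacts import models

source = models.MongoDbV2Source(
    filter='{"status": "shipped"}',
    cursor_methods=models.MongoDbCursorMethodsProperties(
        sort='{"_id": -1}',
        limit=1000,
    ),
    batch_size=500,
    query_timeout="00:10:00",
)
```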
+
+
+class MySqlLinkedService(LinkedService):
+    """Linked service for MySQL data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param connection_string: Required. The connection string.
+    :type connection_string: object
+    :param password: The Azure key vault secret reference of password in connection string.
+    :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MySqlLinkedService, self).__init__(**kwargs)
+        self.type = 'MySql'  # type: str
+        self.connection_string = kwargs['connection_string']
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+
+
+class MySqlSource(TabularSource):
+    """A copy activity source for MySQL databases.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type. Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: Database query. Type: string (or Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MySqlSource, self).__init__(**kwargs)
+        self.type = 'MySqlSource'  # type: str
+        self.query = kwargs.get('query', None)
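A linked-service-plus-source sketch for MySQL (hypothetical connection string and vault names; `AzureKeyVaultSecretReference` is assumed to take the usual `store`/`secret_name` arguments):

```python
from azure.synapse.artifacts import models

mysql_ls = models.MySqlLinkedService(
    connection_string="Server=mysql01;Port=3306;Database=inventory;UID=reader;SSLMode=1",
    password=models.AzureKeyVaultSecretReference(
        store=models.LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="KeyVaultLs",  # hypothetical key vault linked service
        ),
        secret_name="mysql-password",
    ),
)
mysql_source = models.MySqlSource(
    query="SELECT sku, quantity FROM stock",
    query_timeout="00:05:00",
)
```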
+
+
+class MySqlTableDataset(Dataset):
+    """The MySQL table dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset. Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+     with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+     Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+     root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param table_name: The MySQL table name. Type: string (or Expression with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(MySqlTableDataset, self).__init__(**kwargs)
+        self.type = 'MySqlTable'  # type: str
+        self.table_name = kwargs.get('table_name', None)
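The corresponding dataset sketch (hypothetical names; only `linked_service_name` is required, `table_name` stays optional):

```python
from azure.synapse.artifacts import models

stock_ds = models.MySqlTableDataset(
    linked_service_name=models.LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="MySqlLs",  # hypothetical linked service name
    ),
    table_name="stock",
)
```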
+
+
+class NetezzaLinkedService(LinkedService):
+    """Netezza linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param connection_string: An ODBC connection string. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param pwd: The Azure key vault secret reference of password in connection string.
+    :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).
-    :type cluster_user_name: object
-    :param cluster_password: The password to access the cluster.
-    :type cluster_password: ~azure.synapse.artifacts.models.SecretBase
-    :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for
-     Linux). Type: string (or Expression with resultType string).
-    :type cluster_ssh_user_name: object
-    :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux).
-    :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase
-    :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight
-     linked service so that the Data Factory service can register them on your behalf.
-    :type additional_linked_service_names:
-     list[~azure.synapse.artifacts.models.LinkedServiceReference]
-    :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the
-     HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database
-     as the metastore.
-    :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
-    :param cluster_type: The cluster type. Type: string (or Expression with resultType string).
-    :type cluster_type: object
-    :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(NetezzaLinkedService, self).__init__(**kwargs)
+        self.type = 'Netezza'  # type: str
+        self.connection_string = kwargs.get('connection_string', None)
+        self.pwd = kwargs.get('pwd', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
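A minimal `NetezzaLinkedService` sketch (hypothetical ODBC string; only `type` is formally required, but a connection string is needed in practice):

```python
from azure.synapse.artifacts import models

netezza_ls = models.NetezzaLinkedService(
    connection_string="Server=nz01;Port=5480;Database=sales;UID=loader",
    pwd=models.AzureKeyVaultSecretReference(
        store=models.LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="KeyVaultLs",  # hypothetical key vault linked service
        ),
        secret_name="netezza-password",
    ),
)
```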
+
+
+class NetezzaPartitionSettings(msrest.serialization.Model):
+    """The settings that will be leveraged for Netezza source partitioning.
+
+    :param partition_column_name: The name of the integer-type column that will be used for range
+     partitioning. Type: string (or Expression with resultType string).
+    :type partition_column_name: object
+    :param partition_upper_bound: The maximum value of the column specified in partitionColumnName
+     that will be used for range partitioning. Type: string (or Expression with resultType
+     string).
+    :type partition_upper_bound: object
+    :param partition_lower_bound: The minimum value of the column specified in partitionColumnName
+     that will be used for range partitioning. Type: string (or Expression with resultType
+     string).
+    :type partition_lower_bound: object
+    """
+
+    _attribute_map = {
+        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(NetezzaPartitionSettings, self).__init__(**kwargs)
+        self.partition_column_name = kwargs.get('partition_column_name', None)
+        self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+        self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
+
+
+class NetezzaSource(TabularSource):
+    """A copy activity Netezza source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type. Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: A query to retrieve data from source. Type: string (or Expression with resultType
+     string).
+    :type query: object
+    :param partition_option: The partition mechanism that will be used for Netezza read in
+     parallel. Possible values include: "None", "DataSlice", "DynamicRange".
+    :type partition_option: str or ~azure.synapse.artifacts.models.NetezzaPartitionOption
+    :param partition_settings: The settings that will be leveraged for Netezza source partitioning.
+    :type partition_settings: ~azure.synapse.artifacts.models.NetezzaPartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(NetezzaSource, self).__init__(**kwargs)
+        self.type = 'NetezzaSource'  # type: str
+        self.query = kwargs.get('query', None)
+        self.partition_option = kwargs.get('partition_option', None)
+        self.partition_settings = kwargs.get('partition_settings', None)
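A sketch of a parallel Netezza read wired to the partition settings above (hypothetical table and bounds; the bounds are strings because the properties are untyped `object` expressions):

```python
from azure.synapse.artifacts import models

netezza_source = models.NetezzaSource(
    query="SELECT * FROM ORDERS",
    partition_option="DynamicRange",
    partition_settings=models.NetezzaPartitionSettings(
        partition_column_name="ORDER_ID",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)
```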
+
+
+class NetezzaTableDataset(Dataset):
+    """Netezza dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset. Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+     with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+     Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+     root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param table_name: This property will be retired. Please consider using schema + table
+     properties instead.
+    :type table_name: object
+    :param table: The table name of the Netezza database. Type: string (or Expression with
+     resultType string).
+    :type table: object
+    :param schema_type_properties_schema: The schema name of the Netezza database. Type: string (or
+     Expression with resultType string).
-    :type spark_version: object
-    :param core_configuration: Specifies the core configuration parameters (as in core-site.xml)
-     for the HDInsight cluster to be created.
-    :type core_configuration: object
-    :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for
-     the HDInsight cluster.
-    :type h_base_configuration: object
-    :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the
-     HDInsight cluster.
-    :type hdfs_configuration: object
-    :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the
-     HDInsight cluster.
-    :type hive_configuration: object
-    :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred-
-     site.xml) for the HDInsight cluster.
-    :type map_reduce_configuration: object
-    :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for
-     the HDInsight cluster.
-    :type oozie_configuration: object
-    :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for
-     the HDInsight cluster.
-    :type storm_configuration: object
-    :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the
-     HDInsight cluster.
-    :type yarn_configuration: object
-    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
-     encrypted using the integration runtime credential manager. Type: string (or Expression with
-     resultType string).
-    :type encrypted_credential: object
-    :param head_node_size: Specifies the size of the head node for the HDInsight cluster.
-    :type head_node_size: object
-    :param data_node_size: Specifies the size of the data node for the HDInsight cluster.
-    :type data_node_size: object
-    :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight
-     cluster.
-    :type zookeeper_node_size: object
-    :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up.
- Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- - cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- - us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be - joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was - specified, then this property is required. Type: string (or Expression with resultType string). - :type subnet_name: object + :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(NetezzaTableDataset, self).__init__(**kwargs) + self.type = 'NetezzaTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + + +class Notebook(msrest.serialization.Model): + """Notebook. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param description: The description of the notebook. + :type description: str + :param big_data_pool: Big data pool reference. + :type big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference + :param session_properties: Session properties. + :type session_properties: ~azure.synapse.artifacts.models.NotebookSessionProperties + :param metadata: Required. Notebook root-level metadata. + :type metadata: ~azure.synapse.artifacts.models.NotebookMetadata + :param nbformat: Required. Notebook format (major number). Incremented between backwards + incompatible changes to the notebook format. + :type nbformat: int + :param nbformat_minor: Required. Notebook format (minor number). Incremented for backward + compatible changes to the notebook format. + :type nbformat_minor: int + :param cells: Required. Array of cells of the current notebook. 
+ :type cells: list[~azure.synapse.artifacts.models.NotebookCell] + """ + + _validation = { + 'metadata': {'required': True}, + 'nbformat': {'required': True}, + 'nbformat_minor': {'required': True}, + 'cells': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'big_data_pool': {'key': 'bigDataPool', 'type': 'BigDataPoolReference'}, + 'session_properties': {'key': 'sessionProperties', 'type': 'NotebookSessionProperties'}, + 'metadata': {'key': 'metadata', 'type': 'NotebookMetadata'}, + 'nbformat': {'key': 'nbformat', 'type': 'int'}, + 'nbformat_minor': {'key': 'nbformat_minor', 'type': 'int'}, + 'cells': {'key': 'cells', 'type': '[NotebookCell]'}, + } + + def __init__( + self, + **kwargs + ): + super(Notebook, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.big_data_pool = kwargs.get('big_data_pool', None) + self.session_properties = kwargs.get('session_properties', None) + self.metadata = kwargs['metadata'] + self.nbformat = kwargs['nbformat'] + self.nbformat_minor = kwargs['nbformat_minor'] + self.cells = kwargs['cells'] + + +class NotebookCell(msrest.serialization.Model): + """Notebook cell. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param cell_type: Required. String identifying the type of cell. + :type cell_type: str + :param metadata: Required. Cell-level metadata. + :type metadata: object + :param source: Required. Contents of the cell, represented as an array of lines. + :type source: list[str] + :param attachments: Attachments associated with the cell. + :type attachments: object + :param outputs: Cell-level output items. + :type outputs: list[~azure.synapse.artifacts.models.NotebookCellOutputItem] + """ + + _validation = { + 'cell_type': {'required': True}, + 'metadata': {'required': True}, + 'source': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'cell_type': {'key': 'cell_type', 'type': 'str'}, + 'metadata': {'key': 'metadata', 'type': 'object'}, + 'source': {'key': 'source', 'type': '[str]'}, + 'attachments': {'key': 'attachments', 'type': 'object'}, + 'outputs': {'key': 'outputs', 'type': '[NotebookCellOutputItem]'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookCell, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.cell_type = kwargs['cell_type'] + self.metadata = kwargs['metadata'] + self.source = kwargs['source'] + self.attachments = kwargs.get('attachments', None) + self.outputs = kwargs.get('outputs', None) + + +class NotebookCellOutputItem(msrest.serialization.Model): + """An item of the notebook cell execution output. + + All required parameters must be populated in order to send to Azure. + + :param name: For output_type=stream, determines the name of stream (stdout / stderr). + :type name: str + :param execution_count: Execution sequence number. + :type execution_count: int + :param output_type: Required. Execution, display, or stream outputs. Possible values include: + "execute_result", "display_data", "stream", "error". 
+ :type output_type: str or ~azure.synapse.artifacts.models.CellOutputType + :param text: For output_type=stream, the stream's text output, represented as a string or an + array of strings. + :type text: object + :param data: Output data. Use MIME type as key, and content as value. + :type data: object + :param metadata: Metadata for the output item. + :type metadata: object + """ + + _validation = { + 'output_type': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'execution_count': {'key': 'execution_count', 'type': 'int'}, + 'output_type': {'key': 'output_type', 'type': 'str'}, + 'text': {'key': 'text', 'type': 'object'}, + 'data': {'key': 'data', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookCellOutputItem, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.execution_count = kwargs.get('execution_count', None) + self.output_type = kwargs['output_type'] + self.text = kwargs.get('text', None) + self.data = kwargs.get('data', None) + self.metadata = kwargs.get('metadata', None) + + +class NotebookKernelSpec(msrest.serialization.Model): + """Kernel information. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Name of the kernel specification. + :type name: str + :param display_name: Required. Name to display in UI. + :type display_name: str """ _validation = { - 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, + 'name': {'required': True}, + 'display_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 
'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'display_name', 'type': 'str'}, } def __init__( self, **kwargs ): - super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) - self.type = 'HDInsightOnDemand' - self.cluster_size = kwargs['cluster_size'] - self.time_to_live = kwargs['time_to_live'] - self.version = kwargs['version'] - self.linked_service_name = kwargs['linked_service_name'] - self.host_subscription_id = kwargs['host_subscription_id'] - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs['tenant'] - self.cluster_resource_group = kwargs['cluster_resource_group'] - self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) - self.cluster_user_name = kwargs.get('cluster_user_name', None) - self.cluster_password = kwargs.get('cluster_password', None) - self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) - self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) - self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) - self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) - self.cluster_type = kwargs.get('cluster_type', None) - self.spark_version = kwargs.get('spark_version', None) - self.core_configuration = kwargs.get('core_configuration', None) - self.h_base_configuration = kwargs.get('h_base_configuration', None) - self.hdfs_configuration = kwargs.get('hdfs_configuration', None) - self.hive_configuration = kwargs.get('hive_configuration', None) - self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) - self.oozie_configuration = kwargs.get('oozie_configuration', None) - 
self.storm_configuration = kwargs.get('storm_configuration', None) - self.yarn_configuration = kwargs.get('yarn_configuration', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.head_node_size = kwargs.get('head_node_size', None) - self.data_node_size = kwargs.get('data_node_size', None) - self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) - self.script_actions = kwargs.get('script_actions', None) - self.virtual_network_id = kwargs.get('virtual_network_id', None) - self.subnet_name = kwargs.get('subnet_name', None) + super(NotebookKernelSpec, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs['name'] + self.display_name = kwargs['display_name'] -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. +class NotebookLanguageInfo(msrest.serialization.Model): + """Language info. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Required. The programming language which this kernel runs. :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression - with resultType array). - :type arguments: object - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] + :param codemirror_mode: The codemirror mode to use for code in this language. 
+ :type codemirror_mode: str """ _validation = { 'name': {'required': True}, - 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'codemirror_mode': {'key': 'codemirror_mode', 'type': 'str'}, } def __init__( self, **kwargs ): - super(HDInsightPigActivity, self).__init__(**kwargs) - self.type = 'HDInsightPig' - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.script_path = kwargs.get('script_path', None) - self.script_linked_service = kwargs.get('script_linked_service', None) - self.defines = kwargs.get('defines', None) + super(NotebookLanguageInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs['name'] + self.codemirror_mode = kwargs.get('codemirror_mode', None) -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. +class NotebookListResponse(msrest.serialization.Model): + """A list of Notebook resources. All required parameters must be populated in order to send to Azure. + :param value: Required. List of Notebooks. + :type value: list[~azure.synapse.artifacts.models.NotebookResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[NotebookResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class NotebookMetadata(msrest.serialization.Model): + """Notebook root-level metadata. + :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. - Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading the entry file and - dependencies, and for receiving logs. - :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. Type: string (or - Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] + :param kernelspec: Kernel information. + :type kernelspec: ~azure.synapse.artifacts.models.NotebookKernelSpec + :param language_info: Language info. + :type language_info: ~azure.synapse.artifacts.models.NotebookLanguageInfo + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'kernelspec': {'key': 'kernelspec', 'type': 'NotebookKernelSpec'}, + 'language_info': {'key': 'language_info', 'type': 'NotebookLanguageInfo'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookMetadata, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.kernelspec = kwargs.get('kernelspec', None) + self.language_info = kwargs.get('language_info', None) + + +class NotebookResource(AzureEntityResource): + """Notebook resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of Notebook. 
+ :type properties: ~azure.synapse.artifacts.models.Notebook """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Notebook'}, } def __init__( self, **kwargs ): - super(HDInsightSparkActivity, self).__init__(**kwargs) - self.type = 'HDInsightSpark' - self.root_path = kwargs['root_path'] - self.entry_file_path = kwargs['entry_file_path'] - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None) - self.class_name = kwargs.get('class_name', None) - self.proxy_user = kwargs.get('proxy_user', None) - self.spark_config = kwargs.get('spark_config', None) + super(NotebookResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. +class NotebookSessionProperties(msrest.serialization.Model): + """Session properties. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. 
- :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType - string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType - string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] + :param driver_memory: Required. Amount of memory to use for the driver process. + :type driver_memory: str + :param driver_cores: Required. Number of cores to use for the driver. + :type driver_cores: int + :param executor_memory: Required. Amount of memory to use per executor process. + :type executor_memory: str + :param executor_cores: Required. Number of cores to use for each executor. + :type executor_cores: int + :param num_executors: Required. Number of executors to launch for this session. 
+ :type num_executors: int """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, + 'driver_memory': {'required': True}, + 'driver_cores': {'required': True}, + 'executor_memory': {'required': True}, + 'executor_cores': {'required': True}, + 'num_executors': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, } def __init__( self, **kwargs ): - super(HDInsightStreamingActivity, self).__init__(**kwargs) - self.type = 'HDInsightStreaming' - self.storage_linked_services = kwargs.get('storage_linked_services', None) - self.arguments = kwargs.get('arguments', None) - self.get_debug_info = kwargs.get('get_debug_info', None) - self.mapper = kwargs['mapper'] - self.reducer = kwargs['reducer'] - self.input = kwargs['input'] - self.output = kwargs['output'] - self.file_paths = kwargs['file_paths'] - self.file_linked_service = kwargs.get('file_linked_service', None) - self.combiner = kwargs.get('combiner', None) - self.command_environment = kwargs.get('command_environment', None) - self.defines = kwargs.get('defines', None) + super(NotebookSessionProperties, self).__init__(**kwargs) + self.driver_memory = kwargs['driver_memory'] + self.driver_cores = kwargs['driver_cores'] + self.executor_memory = kwargs['executor_memory'] + self.executor_cores = kwargs['executor_cores'] + self.num_executors = kwargs['num_executors'] -class HiveLinkedService(LinkedService): - """Hive Server linked service. +class ODataLinkedService(LinkedService): + """Open Data Protocol (OData) linked service. All required parameters must be populated in order to send to Azure. 
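Before the OData changes below, a sketch showing how the notebook models above compose (hypothetical kernel and session values; nbformat 4.2 mirrors the Jupyter convention, and `NotebookResource` is assumed to need only its required `properties`):

```python
from azure.synapse.artifacts import models

notebook = models.Notebook(
    metadata=models.NotebookMetadata(
        kernelspec=models.NotebookKernelSpec(
            name="synapse_pyspark",
            display_name="Synapse PySpark",
        ),
        language_info=models.NotebookLanguageInfo(name="python"),
    ),
    nbformat=4,
    nbformat_minor=2,
    cells=[
        models.NotebookCell(
            cell_type="code",
            metadata={},
            source=["print('hello synapse')"],
        ),
    ],
    session_properties=models.NotebookSessionProperties(
        driver_memory="28g",
        driver_cores=4,
        executor_memory="28g",
        executor_cores=4,
        num_executors=2,
    ),
)
notebook_resource = models.NotebookResource(properties=notebook)
```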
@@ -9588,53 +19429,42 @@ class HiveLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: "HiveServer1", - "HiveServer2", "HiveThriftServer". - :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.synapse.artifacts.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Hive server. - Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are - added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts - them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. + :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the OData service. + Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", + "ManagedServiceIdentity". + :type authentication_type: str or ~azure.synapse.artifacts.models.ODataAuthenticationType + :param user_name: User name of the OData service. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password of the OData service. :type password: ~azure.synapse.artifacts.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. 
The default value is false.
-    :type allow_host_name_cn_mismatch: object
-    :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from
-     the server. The default value is false.
-    :type allow_self_signed_server_cert: object
+    :param tenant: Specify the tenant information (domain name or tenant ID) under which your
+     application resides. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param service_principal_id: Specify the application ID of your application registered in Azure
+     Active Directory. Type: string (or Expression with resultType string).
+    :type service_principal_id: object
+    :param aad_resource_id: Specify the resource you are requesting authorization to use. Type:
+     string (or Expression with resultType string).
+    :type aad_resource_id: object
+    :param aad_service_principal_credential_type: Specify the credential type (key or cert) that is
+     used for the service principal. Possible values include: "ServicePrincipalKey",
+     "ServicePrincipalCert".
+    :type aad_service_principal_credential_type: str or
+     ~azure.synapse.artifacts.models.ODataAadServicePrincipalCredentialType
+    :param service_principal_key: Specify the secret of your application registered in Azure Active
+     Directory. Type: string (or Expression with resultType string).
+    :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase
+    :param service_principal_embedded_cert: Specify the base64 encoded certificate of your
+     application registered in Azure Active Directory. Type: string (or Expression with resultType
+     string).
+    :type service_principal_embedded_cert: ~azure.synapse.artifacts.models.SecretBase
+    :param service_principal_embedded_cert_password: Specify the password of your certificate if
+     your certificate has a password and you are using AadServicePrincipal authentication. Type:
+     string (or Expression with resultType string).
+    :type service_principal_embedded_cert_password: ~azure.synapse.artifacts.models.SecretBase
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
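Of the OData type properties documented above, only url ends up required (see the _validation hunk that follows); the AAD service-principal fields are all optional. A minimal sketch of wiring them together, assuming the models are exported from azure.synapse.artifacts.models and using SecureString as the concrete SecretBase subtype this package ships (endpoint, tenant, and IDs are illustrative placeholders):

    from azure.synapse.artifacts.models import ODataLinkedService, SecureString

    # 'url' is fetched via kwargs['url'] in the generated __init__, so leaving
    # it out raises a KeyError; every other kwarg defaults to None.
    odata_ls = ODataLinkedService(
        url="https://services.odata.org/V4/TripPinService",    # illustrative endpoint
        authentication_type="AadServicePrincipal",
        tenant="contoso.onmicrosoft.com",                       # illustrative tenant
        service_principal_id="00000000-0000-0000-0000-000000000000",
        aad_service_principal_credential_type="ServicePrincipalKey",
        service_principal_key=SecureString(value="<secret>"),   # SecretBase subtype
    )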
@@ -9643,8 +19473,7 @@ class HiveLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -9654,22 +19483,17 @@ class HiveLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9677,29 +19501,24 @@ def __init__( self, **kwargs ): - super(HiveLinkedService, self).__init__(**kwargs) - self.type = 'Hive' - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs['authentication_type'] - self.service_discovery_mode = kwargs.get('service_discovery_mode', None) - self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) - self.use_native_query = kwargs.get('use_native_query', None) - self.username = kwargs.get('username', None) + super(ODataLinkedService, 
self).__init__(**kwargs) + self.type = 'OData' # type: str + self.url = kwargs['url'] + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class HiveObjectDataset(Dataset): - """Hive Server dataset. +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. All required parameters must be populated in order to send to Azure. @@ -9725,14 +19544,8 @@ class HiveObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param path: The OData resource path. Type: string (or Expression with resultType string). + :type path: object """ _validation = { @@ -9750,111 +19563,66 @@ class HiveObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HiveObjectDataset, self).__init__(**kwargs) - self.type = 'HiveObject' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + super(ODataResourceDataset, self).__init__(**kwargs) + self.type = 'ODataResource' # type: str + self.path = kwargs.get('path', None) -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. +class ODataSource(CopySource): + """A copy activity source for OData source. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: - string (or Expression with resultType string). - :type url: object - :param authentication_type: The authentication type to be used to connect to the HTTP server. - Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". - :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate with - EmbeddedCertData authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate - authentication. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only - valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS server SSL - certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HttpLinkedService, self).__init__(**kwargs) - self.type = 'HttpServer' - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.embedded_cert_data = kwargs.get('embedded_cert_data', None) - self.cert_thumbprint = kwargs.get('cert_thumbprint', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + super(ODataSource, self).__init__(**kwargs) + self.type = 'ODataSource' # type: str + self.query = kwargs.get('query', None) -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. All required parameters must be populated in order to send to Azure. @@ -9871,26 +19639,21 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param access_token: The access token obtained when initially authenticating your OAuth - integration. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param refresh_token: The refresh token obtained when initially authenticating your OAuth - integration. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the ODBC data store. + Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~azure.synapse.artifacts.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -9899,7 +19662,7 @@ class HubspotLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { @@ -9909,13 +19672,11 @@ class HubspotLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9923,236 +19684,179 @@ def __init__( self, **kwargs ): - super(HubspotLinkedService, self).__init__(**kwargs) - self.type = 'Hubspot' - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.access_token = kwargs.get('access_token', None) - self.refresh_token = kwargs.get('refresh_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - 
self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) + super(OdbcLinkedService, self).__init__(**kwargs) + self.type = 'Odbc' # type: str + self.connection_string = kwargs['connection_string'] + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. +class OdbcSink(CopySink): + """A copy activity ODBC sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
+ :type pre_copy_script: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(HubspotObjectDataset, self).__init__(**kwargs) - self.type = 'HubspotObject' - self.table_name = kwargs.get('table_name', None) + super(OdbcSink, self).__init__(**kwargs) + self.type = 'OdbcSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class IfConditionActivity(Activity): - """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. +class OdbcSource(TabularSource): + """A copy activity source for ODBC databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. This is used to - determine the block of activities (ifTrueActivities or ifFalseActivities) that will be - executed. - :type expression: ~azure.synapse.artifacts.models.Expression - :param if_true_activities: List of activities to execute if expression is evaluated to true. - This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~azure.synapse.artifacts.models.Activity] - :param if_false_activities: List of activities to execute if expression is evaluated to false. - This is an optional property and if not provided, the activity will exit without any action. - :type if_false_activities: list[~azure.synapse.artifacts.models.Activity] + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'expression': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(IfConditionActivity, self).__init__(**kwargs) - self.type = 'IfCondition' - self.expression = kwargs['expression'] - self.if_true_activities = kwargs.get('if_true_activities', None) - self.if_false_activities = kwargs.get('if_false_activities', None) + super(OdbcSource, self).__init__(**kwargs) + self.type = 'OdbcSource' # type: str + self.query = kwargs.get('query', None) -class ImpalaLinkedService(LinkedService): - """Impala server linked service. +class OdbcTableDataset(Dataset): + """The ODBC table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
:type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Impala server uses to listen for client connections. The - default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The default value is anonymous - when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The ODBC table name. Type: string (or Expression with resultType string). 
+ :type table_name: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ImpalaLinkedService, self).__init__(**kwargs) - self.type = 'Impala' - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(OdbcTableDataset, self).__init__(**kwargs) + self.type = 'OdbcTable' # type: str + self.table_name = kwargs.get('table_name', None) -class ImpalaObjectDataset(Dataset): - """Impala server dataset. +class Office365Dataset(Dataset): + """The Office365 account. All required parameters must be populated in order to send to Azure. @@ -10178,20 +19882,18 @@ class ImpalaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Impala. Type: string (or + :param table_name: Required. Name of the dataset to extract from Office 365. 
Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type table_name: object + :param predicate: A predicate expression that can be used to filter the specific rows to + extract from Office 365. Type: string (or Expression with resultType string). + :type predicate: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, } _attribute_map = { @@ -10205,23 +19907,21 @@ class ImpalaObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ImpalaObjectDataset, self).__init__(**kwargs) - self.type = 'ImpalaObject' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + super(Office365Dataset, self).__init__(**kwargs) + self.type = 'Office365Table' # type: str + self.table_name = kwargs['table_name'] + self.predicate = kwargs.get('predicate', None) -class InformixLinkedService(LinkedService): - """Informix linked service. +class Office365LinkedService(LinkedService): + """Office365 linked service. All required parameters must be populated in order to send to Azure. @@ -10238,22 +19938,17 @@ class InformixLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Informix as ODBC data - store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType - string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. + Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant information under which your + Azure AD web application resides. Type: string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. 
+ :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -10262,7 +19957,10 @@ class InformixLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, } _attribute_map = { @@ -10272,11 +19970,10 @@ class InformixLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -10284,115 +19981,182 @@ def __init__( self, **kwargs ): - super(InformixLinkedService, self).__init__(**kwargs) - self.type = 'Informix' - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(Office365LinkedService, self).__init__(**kwargs) + self.type = 'Office365' # type: str + self.office365_tenant_id = kwargs['office365_tenant_id'] + self.service_principal_tenant_id = kwargs['service_principal_tenant_id'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class Office365Source(CopySource): + """A copy activity source for an Office 365 service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param allowed_groups: The groups containing all the users. Type: array of strings (or
+     Expression with resultType array of strings).
+    :type allowed_groups: object
+    :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType
+     string).
+    :type user_scope_filter_uri: object
+    :param date_filter_column: The column on which to apply the :code:`start_time` and
+     :code:`end_time` filters. Type: string (or Expression with resultType string).
+    :type date_filter_column: object
+    :param start_time: Start time of the requested range for this dataset. Type: string (or
+     Expression with resultType string).
+    :type start_time: object
+    :param end_time: End time of the requested range for this dataset. Type: string (or Expression
+     with resultType string).
+    :type end_time: object
+    :param output_columns: The columns to be read out from the Office 365 table. Type: array of
+     objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, {
+     "name": "CreatedDateTime" } ].
+    :type output_columns: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'allowed_groups': {'key': 'allowedGroups', 'type': 'object'},
+        'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'},
+        'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'},
+        'start_time': {'key': 'startTime', 'type': 'object'},
+        'end_time': {'key': 'endTime', 'type': 'object'},
+        'output_columns': {'key': 'outputColumns', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Office365Source, self).__init__(**kwargs)
+        self.type = 'Office365Source'  # type: str
+        self.allowed_groups = kwargs.get('allowed_groups', None)
+        self.user_scope_filter_uri = kwargs.get('user_scope_filter_uri', None)
+        self.date_filter_column = kwargs.get('date_filter_column', None)
+        self.start_time = kwargs.get('start_time', None)
+        self.end_time = kwargs.get('end_time', None)
+        self.output_columns = kwargs.get('output_columns', None)


-class InformixTableDataset(Dataset):
-    """The Informix table dataset.
+class OracleLinkedService(LinkedService):
+    """Oracle database.

     All required parameters must be populated in order to send to Azure.

     :param additional_properties: Unmatched properties from the message are deserialized to this
      collection.
     :type additional_properties: dict[str, object]
-    :param type: Required. Type of dataset.Constant filled by server.
+    :param type: Required. Type of linked service.Constant filled by server.
     :type type: str
-    :param description: Dataset description.
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
     :type description: str
-    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
-     with resultType array), itemType: DatasetDataElement.
-    :type structure: object
-    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
-     Expression with resultType array), itemType: DatasetSchemaDataElement.
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Informix table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(InformixTableDataset, self).__init__(**kwargs) - self.type = 'InformixTable' - self.table_name = kwargs.get('table_name', None) - - -class IntegrationRuntimeReference(msrest.serialization.Model): - """Integration runtime reference type. + super(OracleLinkedService, self).__init__(**kwargs) + self.type = 'Oracle' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. +class OraclePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Oracle source partitioning. - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference". - :vartype type: str - :param reference_name: Required. 
Reference integration runtime name.
-    :type reference_name: str
-    :param parameters: Arguments for integration runtime.
-    :type parameters: dict[str, object]
+    :param partition_names: Names of the physical partitions of the Oracle table.
+    :type partition_names: object
+    :param partition_column_name: The name of the integer-type column that will be used to proceed
+     with range partitioning. Type: string (or Expression with resultType string).
+    :type partition_column_name: object
+    :param partition_upper_bound: The maximum value of the column specified in partitionColumnName
+     that will be used to proceed with range partitioning. Type: string (or Expression with
+     resultType string).
+    :type partition_upper_bound: object
+    :param partition_lower_bound: The minimum value of the column specified in partitionColumnName
+     that will be used to proceed with range partitioning. Type: string (or Expression with
+     resultType string).
+    :type partition_lower_bound: object
     """

-    _validation = {
-        'type': {'required': True, 'constant': True},
-        'reference_name': {'required': True},
-    }
-
     _attribute_map = {
-        'type': {'key': 'type', 'type': 'str'},
-        'reference_name': {'key': 'referenceName', 'type': 'str'},
-        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'partition_names': {'key': 'partitionNames', 'type': 'object'},
+        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
     }

-    type = "IntegrationRuntimeReference"
-
     def __init__(
         self,
         **kwargs
     ):
-        super(IntegrationRuntimeReference, self).__init__(**kwargs)
-        self.reference_name = kwargs['reference_name']
-        self.parameters = kwargs.get('parameters', None)
+        super(OraclePartitionSettings, self).__init__(**kwargs)
+        self.partition_names = kwargs.get('partition_names', None)
+        self.partition_column_name = kwargs.get('partition_column_name', None)
+        self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+        self.partition_lower_bound = kwargs.get('partition_lower_bound', None)


-class JiraLinkedService(LinkedService):
-    """Jira Service linked service.
+class OracleServiceCloudLinkedService(LinkedService):
+    """Oracle Service Cloud linked service.

     All required parameters must be populated in order to send to Azure.

@@ -10409,26 +20173,23 @@
     :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
     :param annotations: List of tags that can be used for describing the linked service.
     :type annotations: list[object]
-    :param host: Required. The IP address or host name of the Jira service. (e.g.
-     jira.example.com).
+    :param host: Required. The URL of the Oracle Service Cloud instance.
     :type host: object
-    :param port: The TCP port that the Jira server uses to listen for client connections. The
-     default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP.
-    :type port: object
-    :param username: Required. The user name that you use to access Jira Service.
+    :param username: Required. The user name that you use to access the Oracle Service Cloud
+     server.
     :type username: object
-    :param password: The password corresponding to the user name that you provided in the username
-     field.
+    :param password: Required. The password corresponding to the user name that you provided in the
+     username key.
:type password: ~azure.synapse.artifacts.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. + HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is - true. + true. Type: boolean (or Expression with resultType boolean). :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with @@ -10440,6 +20201,7 @@ class JiraLinkedService(LinkedService): 'type': {'required': True}, 'host': {'required': True}, 'username': {'required': True}, + 'password': {'required': True}, } _attribute_map = { @@ -10450,7 +20212,6 @@ class JiraLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -10463,20 +20224,19 @@ def __init__( self, **kwargs ): - super(JiraLinkedService, self).__init__(**kwargs) - self.type = 'Jira' + super(OracleServiceCloudLinkedService, self).__init__(**kwargs) + self.type = 'OracleServiceCloud' # type: str self.host = kwargs['host'] - self.port = kwargs.get('port', None) self.username = kwargs['username'] - self.password = kwargs.get('password', None) + self.password = kwargs['password'] self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) self.use_host_verification = kwargs.get('use_host_verification', None) self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class JiraObjectDataset(Dataset): - """Jira Service dataset. +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. All required parameters must be populated in order to send to Azure. @@ -10528,390 +20288,471 @@ def __init__( self, **kwargs ): - super(JiraObjectDataset, self).__init__(**kwargs) - self.type = 'JiraObject' + super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) + self.type = 'OracleServiceCloudObject' # type: str self.table_name = kwargs.get('table_name', None) -class JsonDataset(Dataset): - """Json dataset. +class OracleServiceCloudSource(TabularSource): + """A copy activity Oracle Service Cloud source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. 
Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the - default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column - of the table in the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(JsonDataset, self).__init__(**kwargs) - self.type = 'Json' - self.location = kwargs.get('location', None) - self.encoding_name = kwargs.get('encoding_name', None) - self.compression = kwargs.get('compression', None) + super(OracleServiceCloudSource, self).__init__(**kwargs) + self.type = 'OracleServiceCloudSource' # type: str + self.query = kwargs.get('query', None) -class LinkedServiceDebugResource(SubResourceDebugResource): - """Linked service debug resource. +class OracleSink(CopySink): + """A copy activity Oracle sink. All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). 
+ :type pre_copy_script: object """ _validation = { - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, **kwargs ): - super(LinkedServiceDebugResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(OracleSink, self).__init__(**kwargs) + self.type = 'OracleSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) -class LinkedServiceListResponse(msrest.serialization.Model): - """A list of linked service resources. +class OracleSource(CopySource): + """A copy activity Oracle source. All required parameters must be populated in order to send to Azure. - :param value: Required. List of linked services. - :type value: list[~azure.synapse.artifacts.models.LinkedServiceResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for Oracle read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.OraclePartitionSettings """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__( self, **kwargs ): - super(LinkedServiceListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - + super(OracleSource, self).__init__(**kwargs) + self.type = 'OracleSource' # type: str + self.oracle_reader_query = kwargs.get('oracle_reader_query', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) -class LinkedServiceReference(msrest.serialization.Model): - """Linked service reference type. - Variables are only populated by the server, and will be ignored when sending a request. +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the on-premises Oracle database. 
Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the on-premises Oracle database. Type: string (or Expression + with resultType string). + :type table: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - type = "LinkedServiceReference" - def __init__( self, **kwargs ): - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.parameters = kwargs.get('parameters', None) - + super(OracleTableDataset, self).__init__(**kwargs) + self.type = 'OracleTable' # type: str + self.table_name = kwargs.get('table_name', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class LinkedServiceResource(SubResource): - """Linked service resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class OrcDataset(Dataset): + """ORC dataset. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the ORC data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, } def __init__( self, **kwargs ): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(OrcDataset, self).__init__(**kwargs) + self.type = 'Orc' # type: str + self.location = kwargs.get('location', None) + self.orc_compression_codec = kwargs.get('orc_compression_codec', None) -class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string - (or Expression with resultType string). - :type path: object + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :type deserializer: object """ _validation = { - 'linked_service_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, **kwargs ): - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs['linked_service_name'] - self.path = kwargs.get('path', None) + super(OrcFormat, self).__init__(**kwargs) + self.type = 'OrcFormat' # type: str -class LookupActivity(ExecutionActivity): - """Lookup activity. +class OrcSink(CopySink): + """A copy activity ORC sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~azure.synapse.artifacts.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default value is true. Type: - boolean (or Expression with resultType boolean). - :type first_row_only: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, **kwargs ): - super(LookupActivity, self).__init__(**kwargs) - self.type = 'Lookup' - self.source = kwargs['source'] - self.dataset = kwargs['dataset'] - self.first_row_only = kwargs.get('first_row_only', None) + super(OrcSink, self).__init__(**kwargs) + self.type = 'OrcSink' # type: str + self.store_settings = kwargs.get('store_settings', None) -class MagentoLinkedService(LinkedService): - """Magento server linked service. +class OrcSource(CopySource): + """A copy activity ORC source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: object - :param access_token: The access token from Magento. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. 
- :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, **kwargs ): - super(MagentoLinkedService, self).__init__(**kwargs) - self.type = 'Magento' - self.host = kwargs['host'] - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(OrcSource, self).__init__(**kwargs) + self.type = 'OrcSource' # type: str + self.store_settings = kwargs.get('store_settings', None) -class MagentoObjectDataset(Dataset): - """Magento server dataset. +class ParameterSpecification(msrest.serialization.Model): + """Definition of a single parameter for an entity. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", + "Float", "Bool", "Array", "SecureString". + :type type: str or ~azure.synapse.artifacts.models.ParameterType + :param default_value: Default value of parameter. 
+ :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ParameterSpecification, self).__init__(**kwargs) + self.type = kwargs['type'] + self.default_value = kwargs.get('default_value', None) + + +class ParquetDataset(Dataset): + """Parquet dataset. All required parameters must be populated in order to send to Azure. @@ -10937,8 +20778,10 @@ class MagentoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param location: The location of the parquet storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". + :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec """ _validation = { @@ -10956,40 +20799,34 @@ class MagentoObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, } def __init__( self, **kwargs ): - super(MagentoObjectDataset, self).__init__(**kwargs) - self.type = 'MagentoObject' - self.table_name = kwargs.get('table_name', None) + super(ParquetDataset, self).__init__(**kwargs) + self.type = 'Parquet' # type: str + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) -class MappingDataFlow(DataFlow): - """Mapping data flow. +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.DataFlowFolder - :param sources: List of sources in data flow. - :type sources: list[~azure.synapse.artifacts.models.DataFlowSource] - :param sinks: List of sinks in data flow. - :type sinks: list[~azure.synapse.artifacts.models.DataFlowSink] - :param transformations: List of transformations in data flow. - :type transformations: list[~azure.synapse.artifacts.models.Transformation] - :param script: DataFlow script. - :type script: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. 
Type: string (or Expression with resultType string). + :type deserializer: object """ _validation = { @@ -10997,55 +20834,47 @@ class MappingDataFlow(DataFlow): } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, - 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, - 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, - 'script': {'key': 'typeProperties.script', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MappingDataFlow, self).__init__(**kwargs) - self.type = 'MappingDataFlow' - self.sources = kwargs.get('sources', None) - self.sinks = kwargs.get('sinks', None) - self.transformations = kwargs.get('transformations', None) - self.script = kwargs.get('script', None) + super(ParquetFormat, self).__init__(**kwargs) + self.type = 'ParquetFormat' # type: str -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. +class ParquetSink(CopySink): + """A copy activity Parquet sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings """ _validation = { @@ -11055,86 +20884,70 @@ class MariaDBLinkedService(LinkedService): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, **kwargs ): - super(MariaDBLinkedService, self).__init__(**kwargs) - self.type = 'MariaDB' - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(ParquetSink, self).__init__(**kwargs) + self.type = 'ParquetSink' # type: str + self.store_settings = kwargs.get('store_settings', None) -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. +class ParquetSource(CopySource): + """A copy activity Parquet source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. 
Type: string (or Expression with resultType string). - :type table_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, **kwargs ): - super(MariaDBTableDataset, self).__init__(**kwargs) - self.type = 'MariaDBTable' - self.table_name = kwargs.get('table_name', None) + super(ParquetSource, self).__init__(**kwargs) + self.type = 'ParquetSource' # type: str + self.store_settings = kwargs.get('store_settings', None) -class MarketoLinkedService(LinkedService): - """Marketo server linked service. +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. All required parameters must be populated in order to send to Azure. @@ -11151,11 +20964,11 @@ class MarketoLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. + :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). + :type host: object + :param client_id: Required. The client ID associated with your PayPal application. :type client_id: object - :param client_secret: The client secret of your Marketo service. + :param client_secret: The client secret associated with your PayPal application. :type client_secret: ~azure.synapse.artifacts.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
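
An editorial aside between hunks (not part of the diff): the regenerated models above are built purely from keyword arguments, with discriminators such as `type` filled in by `__init__` rather than by the caller. Below is a minimal sketch of the partitioned Oracle read introduced earlier in this change, using only names defined verbatim in these hunks; the column name and range bounds are illustrative placeholders.

    from azure.synapse.artifacts.models import OraclePartitionSettings, OracleSource

    source = OracleSource(
        partition_option="DynamicRange",  # or "None" / "PhysicalPartitionsOfTable"
        partition_settings=OraclePartitionSettings(
            partition_column_name="ORDER_ID",  # illustrative integer-typed column
            partition_lower_bound="1",
            partition_upper_bound="100000",
        ),
        query_timeout="02:00:00",  # matches the documented timespan pattern
    )
    assert source.type == "OracleSource"  # discriminator set by __init__, not the caller
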
@@ -11175,7 +20988,7 @@ class MarketoLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, + 'host': {'required': True}, 'client_id': {'required': True}, } @@ -11186,7 +20999,7 @@ class MarketoLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -11199,9 +21012,9 @@ def __init__( self, **kwargs ): - super(MarketoLinkedService, self).__init__(**kwargs) - self.type = 'Marketo' - self.endpoint = kwargs['endpoint'] + super(PaypalLinkedService, self).__init__(**kwargs) + self.type = 'Paypal' # type: str + self.host = kwargs['host'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -11210,8 +21023,8 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MarketoObjectDataset(Dataset): - """Marketo server dataset. +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. All required parameters must be populated in order to send to Azure. @@ -11243,33 +21056,83 @@ class MarketoObjectDataset(Dataset): _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(PaypalObjectDataset, self).__init__(**kwargs) + self.type = 'PaypalObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class PaypalSource(TabularSource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MarketoObjectDataset, self).__init__(**kwargs) - self.type = 'MarketoObject' - self.table_name = kwargs.get('table_name', None) + super(PaypalSource, self).__init__(**kwargs) + self.type = 'PaypalSource' # type: str + self.query = kwargs.get('query', None) -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. All required parameters must be populated in order to send to Azure. @@ -11286,22 +21149,40 @@ class MicrosoftAccessLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Microsoft Access as - ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with - resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. + :param host: Required. The IP address or host name of the Phoenix server. (i.e. + 192.168.222.160). + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for client connections. The + default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. (i.e. + /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using + WindowsAzureHDInsightService. 
+ :type http_path: object + :param authentication_type: Required. The authentication mechanism used to connect to the + Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. + :type username: object + :param password: The password corresponding to the user name. :type password: ~azure.synapse.artifacts.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
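
Another hedged sketch (again editorial, not part of the diff): constructing the reworked Phoenix linked service documented above, where `host` and `authentication_type` are now required. The server address and credentials are placeholders, and `SecureString` is assumed to be the concrete `SecretBase` subtype exposed by these models.

    from azure.synapse.artifacts.models import PhoenixLinkedService, SecureString

    phoenix_ls = PhoenixLinkedService(
        host="192.168.222.160",
        port=8765,  # default Phoenix port, per the docstring
        authentication_type="UsernameAndPassword",  # a PhoenixAuthenticationType value
        username="loader",  # placeholder
        password=SecureString(value="<placeholder>"),  # assumed SecretBase subtype
        enable_ssl=True,
    )
    assert phoenix_ls.type == "Phoenix"  # discriminator set by __init__
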
@@ -11310,7 +21191,8 @@ class MicrosoftAccessLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -11320,11 +21202,17 @@ class MicrosoftAccessLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -11332,18 +21220,24 @@ def __init__( self, **kwargs ): - super(MicrosoftAccessLinkedService, self).__init__(**kwargs) - self.type = 'MicrosoftAccess' - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) + super(PhoenixLinkedService, self).__init__(**kwargs) + self.type = 'Phoenix' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. All required parameters must be populated in order to send to Azure. @@ -11369,9 +21263,15 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). 
+ :param table_name: This property will be retired. Please consider using schema + table
+ properties instead.
 :type table_name: object
+ :param table: The table name of the Phoenix. Type: string (or Expression with resultType
+ string).
+ :type table: object
+ :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or
+ Expression with resultType string).
+ :type schema_type_properties_schema: object
 """

 _validation = {
@@ -11390,384 +21290,424 @@ class MicrosoftAccessTableDataset(Dataset):
 'annotations': {'key': 'annotations', 'type': '[object]'},
 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
 }

 def __init__(
 self,
 **kwargs
 ):
- super(MicrosoftAccessTableDataset, self).__init__(**kwargs)
- self.type = 'MicrosoftAccessTable'
+ super(PhoenixObjectDataset, self).__init__(**kwargs)
+ self.type = 'PhoenixObject' # type: str
 self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None)


-class MongoDbCollectionDataset(Dataset):
- """The MongoDB database dataset.
+class PhoenixSource(TabularSource):
+ """A copy activity Phoenix server source.

 All required parameters must be populated in order to send to Azure.

 :param additional_properties: Unmatched properties from the message are deserialized to this
 collection.
 :type additional_properties: dict[str, object]
- :param type: Required. Type of dataset.Constant filled by server.
+ :param type: Required. Copy source type.Constant filled by server.
 :type type: str
- :param description: Dataset description.
- :type description: str
- :param structure: Columns that define the structure of the dataset. Type: array (or Expression
- with resultType array), itemType: DatasetDataElement.
- :type structure: object
- :param schema: Columns that define the physical type schema of the dataset. Type: array (or
- Expression with resultType array), itemType: DatasetSchemaDataElement.
- :type schema: object
- :param linked_service_name: Required. Linked service reference.
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
- :param parameters: Parameters for dataset.
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the Dataset.
- :type annotations: list[object]
- :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
- root level.
- :type folder: ~azure.synapse.artifacts.models.DatasetFolder
- :param collection_name: Required. The table name of the MongoDB database. Type: string (or
- Expression with resultType string).
- :type collection_name: object
+ :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+ integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the source data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param query_timeout: Query timeout.
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(PhoenixSource, self).__init__(**kwargs) + self.type = 'PhoenixSource' # type: str + self.query = kwargs.get('query', None) + + +class PipelineFolder(msrest.serialization.Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + +class PipelineListResponse(msrest.serialization.Model): + """A list of pipeline resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipelines. + :type value: list[~azure.synapse.artifacts.models.PipelineResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class PipelineReference(msrest.serialization.Model): + """Pipeline reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Pipeline reference type. Possible values include: "PipelineReference". + :type type: str or ~azure.synapse.artifacts.models.PipelineReferenceType + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. 
+ :type name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, **kwargs ): - super(MongoDbCollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbCollection' - self.collection_name = kwargs['collection_name'] + super(PipelineReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + self.name = kwargs.get('name', None) -class MongoDbLinkedService(LinkedService): - """Linked service for MongoDb data source. +class PipelineResource(AzureEntityResource): + """Pipeline resource type. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: The description of the pipeline. :type description: str - :param parameters: Parameters for linked service. + :param activities: List of activities in pipeline. + :type activities: list[~azure.synapse.artifacts.models.Activity] + :param parameters: List of parameters for pipeline. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param variables: List of variables for pipeline. + :type variables: dict[str, ~azure.synapse.artifacts.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the Pipeline. :type annotations: list[object] - :param server: Required. The IP address or server name of the MongoDB server. Type: string (or - Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect to the MongoDB - database. Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.synapse.artifacts.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database_name: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. 
- :type password: ~azure.synapse.artifacts.models.SecretBase - :param auth_source: Database to verify the username and password. Type: string (or Expression - with resultType string). - :type auth_source: object - :param port: The TCP port number that the MongoDB server uses to listen for client connections. - The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. Type: boolean (or Expression with resultType boolean). - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param run_dimensions: Dimensions emitted by Pipeline. + :type run_dimensions: dict[str, object] + :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :type folder: ~azure.synapse.artifacts.models.PipelineFolder """ _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, + 'folder': 
{'key': 'properties.folder', 'type': 'PipelineFolder'}, } def __init__( self, **kwargs ): - super(MongoDbLinkedService, self).__init__(**kwargs) - self.type = 'MongoDb' - self.server = kwargs['server'] - self.authentication_type = kwargs.get('authentication_type', None) - self.database_name = kwargs['database_name'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.auth_source = kwargs.get('auth_source', None) - self.port = kwargs.get('port', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.activities = kwargs.get('activities', None) + self.parameters = kwargs.get('parameters', None) + self.variables = kwargs.get('variables', None) + self.concurrency = kwargs.get('concurrency', None) + self.annotations = kwargs.get('annotations', None) + self.run_dimensions = kwargs.get('run_dimensions', None) + self.folder = kwargs.get('folder', None) -class MongoDbV2CollectionDataset(Dataset): - """The MongoDB database dataset. +class PipelineRun(msrest.serialization.Model): + """Information about a pipeline run. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB database. Type: string (or - Expression with resultType string). - :type collection: object + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline + run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. 
+ :vartype invoked_by: ~azure.synapse.artifacts.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. + :vartype last_updated: ~datetime.datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: ~datetime.datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: ~datetime.datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. + :vartype message: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): - super(MongoDbV2CollectionDataset, self).__init__(**kwargs) - self.type = 'MongoDbV2Collection' - self.collection = kwargs['collection'] - - -class MongoDbV2LinkedService(LinkedService): - """Linked service for MongoDB data source. - - All required parameters must be populated in order to send to Azure. + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. 
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The MongoDB connection string. Type: string, SecureString - or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database: object + +class PipelineRunInvokedBy(msrest.serialization.Model): + """Provides entity name and id that started the pipeline run. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. + :vartype invoked_by_type: str """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, } def __init__( self, **kwargs ): - super(MongoDbV2LinkedService, self).__init__(**kwargs) - self.type = 'MongoDbV2' - self.connection_string = kwargs['connection_string'] - self.database = kwargs['database'] - - -class Trigger(msrest.serialization.Model): - """Azure Synapse nested object which contains information about creating pipeline run. + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MultiplePipelineTrigger, RerunTumblingWindowTrigger. - Variables are only populated by the server, and will be ignored when sending a request. +class PipelineRunsQueryResponse(msrest.serialization.Model): + """A list pipeline runs. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. 
Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] + :param value: Required. List of pipeline runs. + :type value: list[~azure.synapse.artifacts.models.PipelineRun] + :param continuation_token: The continuation token for getting the next page of results, if any + remaining results exist, null otherwise. + :type continuation_token: str """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - } - - _subtype_map = { - 'type': {'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger'} + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'Trigger' - self.description = kwargs.get('description', None) - self.runtime_state = None - self.annotations = kwargs.get('annotations', None) - - -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to pipeline. + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.continuation_token = kwargs.get('continuation_token', None) - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. +class PolybaseSettings(msrest.serialization.Model): + """PolyBase settings. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param reject_type: Reject type. Possible values include: "value", "percentage". + :type reject_type: str or ~azure.synapse.artifacts.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that can be rejected before + the query fails. Type: number (or Expression with resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to retrieve before the + PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with + resultType integer), minimum: 0. 
+ :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in delimited text files when + PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType + boolean). + :type use_type_default: object """ - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MultiplePipelineTrigger, self).__init__(**kwargs) - self.type = 'MultiplePipelineTrigger' - self.pipelines = kwargs.get('pipelines', None) + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reject_type = kwargs.get('reject_type', None) + self.reject_value = kwargs.get('reject_value', None) + self.reject_sample_value = kwargs.get('reject_sample_value', None) + self.use_type_default = kwargs.get('use_type_default', None) -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. @@ -11815,15 +21755,64 @@ def __init__( self, **kwargs ): - super(MySqlLinkedService, self).__init__(**kwargs) - self.type = 'MySql' + super(PostgreSqlLinkedService, self).__init__(**kwargs) + self.type = 'PostgreSql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MySqlTableDataset(Dataset): - """The MySQL table dataset. +class PostgreSqlSource(TabularSource): + """A copy activity source for PostgreSQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PostgreSqlSource, self).__init__(**kwargs)
+ self.type = 'PostgreSqlSource' # type: str
+ self.query = kwargs.get('query', None)
+
+
+class PostgreSqlTableDataset(Dataset):
+ """The PostgreSQL table dataset.

 All required parameters must be populated in order to send to Azure.

@@ -11849,8 +21838,14 @@ class MySqlTableDataset(Dataset):
 :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
 root level.
 :type folder: ~azure.synapse.artifacts.models.DatasetFolder
- :param table_name: The MySQL table name. Type: string (or Expression with resultType string).
+ :param table_name: This property will be retired. Please consider using schema + table
+ properties instead.
 :type table_name: object
+ :param table: The PostgreSQL table name. Type: string (or Expression with resultType string).
+ :type table: object
+ :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression
+ with resultType string).
+ :type schema_type_properties_schema: object
 """

 _validation = {
@@ -11869,19 +21864,23 @@ class MySqlTableDataset(Dataset):
 'annotations': {'key': 'annotations', 'type': '[object]'},
 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
 }

 def __init__(
 self,
 **kwargs
 ):
- super(MySqlTableDataset, self).__init__(**kwargs)
- self.type = 'MySqlTable'
+ super(PostgreSqlTableDataset, self).__init__(**kwargs)
+ self.type = 'PostgreSqlTable' # type: str
 self.table_name = kwargs.get('table_name', None)
+ self.table = kwargs.get('table', None)
+ self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None)


-class NetezzaLinkedService(LinkedService):
- """Netezza linked service.
+class PrestoLinkedService(LinkedService):
+ """Presto server linked service.

 All required parameters must be populated in order to send to Azure.

@@ -11898,11 +21897,42 @@ class NetezzaLinkedService(LinkedService):
 :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
 :param annotations: List of tags that can be used for describing the linked service.
 :type annotations: list[object]
- :param connection_string: An ODBC connection string. Type: string, SecureString or
- AzureKeyVaultSecretReference.
- :type connection_string: object
- :param pwd: The Azure key vault secret reference of password in connection string.
- :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
+ :param host: Required. The IP address or host name of the Presto server. (i.e.
+ 192.168.222.160).
+ :type host: object
+ :param server_version: Required. The version of the Presto server. (i.e. 0.148-t).
+ :type server_version: object
+ :param catalog: Required. The catalog context for all request against the server.
+ :type catalog: object
+ :param port: The TCP port that the Presto server uses to listen for client connections. The
+ default value is 8080.
+ :type port: object
+ :param authentication_type: Required. The authentication mechanism used to connect to the
+ Presto server. Possible values include: "Anonymous", "LDAP".
+ :type authentication_type: str or ~azure.synapse.artifacts.models.PrestoAuthenticationType
+ :param username: The user name used to connect to the Presto server.
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.synapse.artifacts.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
+ default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for
+ verifying the server when connecting over SSL. This property can only be set when using SSL on
+ self-hosted IR. The default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust
+ store or from a specified PEM file. The default value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate
+ name to match the host name of the server when connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from
+ the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param time_zone_id: The local time zone used by the connection. Valid values for this option
+ are specified in the IANA Time Zone Database. The default value is the system time zone.
+ :type time_zone_id: object
 :param encrypted_credential: The encrypted credential used for authentication. Credentials are
 encrypted using the integration runtime credential manager. Type: string (or Expression with
 resultType string).
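The Presto docstring above mirrors the Phoenix surface, with `host`, `server_version`, `catalog`, and `authentication_type` documented as required. A hedged usage sketch follows; all values are placeholders, and the kwargs-based `__init__` accepts the enum as a plain string:

```python
# Hedged sketch: the regenerated PrestoLinkedService with its four required fields.
from azure.synapse.artifacts.models import PrestoLinkedService

presto_ls = PrestoLinkedService(
    host="192.168.222.160",      # required
    server_version="0.148-t",    # required
    catalog="hive",              # required; placeholder catalog name
    authentication_type="LDAP",  # required; "Anonymous" or "LDAP"
    port=8080,                   # optional; 8080 is the documented default
    time_zone_id="UTC",          # optional; defaults to the system time zone
)
```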
@@ -11911,6 +21941,10 @@ class NetezzaLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -11920,8 +21954,19 @@ class NetezzaLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -11929,15 +21974,26 @@ def __init__( self, **kwargs ): - super(NetezzaLinkedService, self).__init__(**kwargs) - self.type = 'Netezza' - self.connection_string = kwargs.get('connection_string', None) - self.pwd = kwargs.get('pwd', None) + super(PrestoLinkedService, self).__init__(**kwargs) + self.type = 'Presto' # type: str + self.host = kwargs['host'] + self.server_version = kwargs['server_version'] + self.catalog = kwargs['catalog'] + self.port = kwargs.get('port', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.time_zone_id = kwargs.get('time_zone_id', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class NetezzaTableDataset(Dataset): - """Netezza dataset. +class PrestoObjectDataset(Dataset): + """Presto server dataset. All required parameters must be populated in order to send to Azure. @@ -11966,10 +22022,10 @@ class NetezzaTableDataset(Dataset): :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object - :param table: The table name of the Netezza. Type: string (or Expression with resultType + :param table: The table name of the Presto. Type: string (or Expression with resultType string). 
:type table: object - :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or + :param schema_type_properties_schema: The schema name of the Presto. Type: string (or Expression with resultType string). :type schema_type_properties_schema: object """ @@ -11998,383 +22054,230 @@ def __init__( self, **kwargs ): - super(NetezzaTableDataset, self).__init__(**kwargs) - self.type = 'NetezzaTable' + super(PrestoObjectDataset, self).__init__(**kwargs) + self.type = 'PrestoObject' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class Notebook(msrest.serialization.Model): - """Notebook. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param description: The description of the notebook. - :type description: str - :param big_data_pool: Big data pool reference. - :type big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference - :param session_properties: Session properties. - :type session_properties: ~azure.synapse.artifacts.models.NotebookSessionProperties - :param metadata: Required. Notebook root-level metadata. - :type metadata: ~azure.synapse.artifacts.models.NotebookMetadata - :param nbformat: Required. Notebook format (major number). Incremented between backwards - incompatible changes to the notebook format. - :type nbformat: int - :param nbformat_minor: Required. Notebook format (minor number). Incremented for backward - compatible changes to the notebook format. - :type nbformat_minor: int - :param cells: Required. Array of cells of the current notebook. - :type cells: list[~azure.synapse.artifacts.models.NotebookCell] - """ - - _validation = { - 'metadata': {'required': True}, - 'nbformat': {'required': True}, - 'nbformat_minor': {'required': True}, - 'cells': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'big_data_pool': {'key': 'bigDataPool', 'type': 'BigDataPoolReference'}, - 'session_properties': {'key': 'sessionProperties', 'type': 'NotebookSessionProperties'}, - 'metadata': {'key': 'metadata', 'type': 'NotebookMetadata'}, - 'nbformat': {'key': 'nbformat', 'type': 'int'}, - 'nbformat_minor': {'key': 'nbformat_minor', 'type': 'int'}, - 'cells': {'key': 'cells', 'type': '[NotebookCell]'}, - } - - def __init__( - self, - **kwargs - ): - super(Notebook, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.big_data_pool = kwargs.get('big_data_pool', None) - self.session_properties = kwargs.get('session_properties', None) - self.metadata = kwargs['metadata'] - self.nbformat = kwargs['nbformat'] - self.nbformat_minor = kwargs['nbformat_minor'] - self.cells = kwargs['cells'] - - -class NotebookCell(msrest.serialization.Model): - """Notebook cell. +class PrestoSource(TabularSource): + """A copy activity Presto server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param cell_type: Required. String identifying the type of cell. 
- :type cell_type: str - :param metadata: Required. Cell-level metadata. - :type metadata: object - :param source: Required. Contents of the cell, represented as an array of lines. - :type source: list[str] - :param attachments: Attachments associated with the cell. - :type attachments: object - :param outputs: Cell-level output items. - :type outputs: list[~azure.synapse.artifacts.models.NotebookCellOutputItem] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { - 'cell_type': {'required': True}, - 'metadata': {'required': True}, - 'source': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'cell_type': {'key': 'cell_type', 'type': 'str'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'source': {'key': 'source', 'type': '[str]'}, - 'attachments': {'key': 'attachments', 'type': 'object'}, - 'outputs': {'key': 'outputs', 'type': '[NotebookCellOutputItem]'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookCell, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.cell_type = kwargs['cell_type'] - self.metadata = kwargs['metadata'] - self.source = kwargs['source'] - self.attachments = kwargs.get('attachments', None) - self.outputs = kwargs.get('outputs', None) - - -class NotebookCellOutputItem(msrest.serialization.Model): - """An item of the notebook cell execution output. - - All required parameters must be populated in order to send to Azure. - - :param name: For output_type=stream, determines the name of stream (stdout / stderr). - :type name: str - :param execution_count: Execution sequence number. - :type execution_count: int - :param output_type: Required. Execution, display, or stream outputs. Possible values include: - "execute_result", "display_data", "stream", "error". - :type output_type: str or ~azure.synapse.artifacts.models.CellOutputType - :param text: For output_type=stream, the stream's text output, represented as a string or an - array of strings. - :type text: object - :param data: Output data. Use MIME type as key, and content as value. - :type data: object - :param metadata: Metadata for the output item. 
- :type metadata: object - """ - - _validation = { - 'output_type': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'execution_count': {'key': 'execution_count', 'type': 'int'}, - 'output_type': {'key': 'output_type', 'type': 'str'}, - 'text': {'key': 'text', 'type': 'object'}, - 'data': {'key': 'data', 'type': 'object'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(NotebookCellOutputItem, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.execution_count = kwargs.get('execution_count', None) - self.output_type = kwargs['output_type'] - self.text = kwargs.get('text', None) - self.data = kwargs.get('data', None) - self.metadata = kwargs.get('metadata', None) + super(PrestoSource, self).__init__(**kwargs) + self.type = 'PrestoSource' # type: str + self.query = kwargs.get('query', None) -class NotebookKernelSpec(msrest.serialization.Model): - """Kernel information. +class PrivateEndpoint(msrest.serialization.Model): + """Private endpoint details. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Name of the kernel specification. - :type name: str - :param display_name: Required. Name to display in UI. - :type display_name: str + :ivar id: Resource id of the private endpoint. + :vartype id: str """ _validation = { - 'name': {'required': True}, - 'display_name': {'required': True}, + 'id': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'display_name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, } def __init__( self, **kwargs ): - super(NotebookKernelSpec, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs['name'] - self.display_name = kwargs['display_name'] + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None -class NotebookLanguageInfo(msrest.serialization.Model): - """Language info. +class PrivateEndpointConnection(Resource): + """A private endpoint connection. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. The programming language which this kernel runs. - :type name: str - :param codemirror_mode: The codemirror mode to use for code in this language. - :type codemirror_mode: str + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
+ :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param private_endpoint: The private endpoint which the connection belongs to. + :type private_endpoint: ~azure.synapse.artifacts.models.PrivateEndpoint + :param private_link_service_connection_state: Connection state of the private endpoint + connection. + :type private_link_service_connection_state: + ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: Provisioning state of the private endpoint connection. + :vartype provisioning_state: str """ _validation = { - 'name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'codemirror_mode': {'key': 'codemirror_mode', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } def __init__( self, **kwargs ): - super(NotebookLanguageInfo, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs['name'] - self.codemirror_mode = kwargs.get('codemirror_mode', None) + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + self.provisioning_state = None -class NotebookListResponse(msrest.serialization.Model): - """A list of Notebook resources. +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """Connection state details of the private endpoint. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param value: Required. List of Notebooks. - :type value: list[~azure.synapse.artifacts.models.NotebookResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param status: The private link service connection status. Possible values include: "Approved", + "Pending", "Rejected", "Disconnected". + :type status: str or ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionStateStatus + :param description: The private link service connection description. + :type description: str + :ivar actions_required: The actions required for private link service connection. + :vartype actions_required: str """ _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[NotebookResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'actions_required': {'readonly': True}, } - def __init__( - self, - **kwargs - ): - super(NotebookListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - - -class NotebookMetadata(msrest.serialization.Model): - """Notebook root-level metadata. 
- - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param kernelspec: Kernel information. - :type kernelspec: ~azure.synapse.artifacts.models.NotebookKernelSpec - :param language_info: Language info. - :type language_info: ~azure.synapse.artifacts.models.NotebookLanguageInfo - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'kernelspec': {'key': 'kernelspec', 'type': 'NotebookKernelSpec'}, - 'language_info': {'key': 'language_info', 'type': 'NotebookLanguageInfo'}, + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, } def __init__( self, **kwargs ): - super(NotebookMetadata, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.kernelspec = kwargs.get('kernelspec', None) - self.language_info = kwargs.get('language_info', None) + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.actions_required = None -class NotebookResource(SubResource): - """Notebook resource type. +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of Notebook. - :type properties: ~azure.synapse.artifacts.models.Notebook """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Notebook'}, } def __init__( self, **kwargs ): - super(NotebookResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - + super(ProxyResource, self).__init__(**kwargs) -class NotebookSessionProperties(msrest.serialization.Model): - """Session properties. - All required parameters must be populated in order to send to Azure. +class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): + """A list of active debug sessions. - :param driver_memory: Required. Amount of memory to use for the driver process. - :type driver_memory: str - :param driver_cores: Required. Number of cores to use for the driver. - :type driver_cores: int - :param executor_memory: Required. Amount of memory to use per executor process. 
- :type executor_memory: str - :param executor_cores: Required. Number of cores to use for each executor. - :type executor_cores: int - :param num_executors: Required. Number of executors to launch for this session. - :type num_executors: int + :param value: Array with all active debug sessions. + :type value: list[~azure.synapse.artifacts.models.DataFlowDebugSessionInfo] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str """ - _validation = { - 'driver_memory': {'required': True}, - 'driver_cores': {'required': True}, - 'executor_memory': {'required': True}, - 'executor_cores': {'required': True}, - 'num_executors': {'required': True}, - } - _attribute_map = { - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): - super(NotebookSessionProperties, self).__init__(**kwargs) - self.driver_memory = kwargs['driver_memory'] - self.driver_cores = kwargs['driver_cores'] - self.executor_memory = kwargs['executor_memory'] - self.executor_cores = kwargs['executor_cores'] - self.num_executors = kwargs['num_executors'] + super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) -class ODataLinkedService(LinkedService): - """Open Data Protocol (OData) linked service. +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. All required parameters must be populated in order to send to Azure. @@ -12391,227 +22294,366 @@ class ODataLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the OData service. - Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", - "ManagedServiceIdentity". - :type authentication_type: str or ~azure.synapse.artifacts.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password of the OData service. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Specify the tenant information (domain name or tenant ID) under which your - application resides. Type: string (or Expression with resultType string). - :type tenant: object - :param service_principal_id: Specify the application id of your application registered in Azure - Active Directory. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. - Type: string (or Expression with resultType string). - :type aad_resource_id: object - :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used - for service principal. 
Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". - :type aad_service_principal_credential_type: str or - ~azure.synapse.artifacts.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application registered in Azure Active - Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded certificate of your - application registered in Azure Active Directory. Type: string (or Expression with resultType - string). - :type service_principal_embedded_cert: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of your certificate if - your certificate has a password and you are using AadServicePrincipal authentication. Type: - string (or Expression with resultType string). - :type service_principal_embedded_cert_password: ~azure.synapse.artifacts.models.SecretBase + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com). + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. + :type consumer_secret: ~azure.synapse.artifacts.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 authentication. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. + :type access_token_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(QuickBooksLinkedService, self).__init__(**kwargs) + self.type = 'QuickBooks' # type: str + self.endpoint = kwargs['endpoint'] + self.company_id = kwargs['company_id'] + self.consumer_key = kwargs['consumer_key'] + self.consumer_secret = kwargs['consumer_secret'] + self.access_token = kwargs['access_token'] + self.access_token_secret = kwargs['access_token_secret'] + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
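All six connection properties on the QuickBooksLinkedService added above are required, so the generated __init__ raises KeyError when any is missing. A usage sketch; SecureString is assumed here as the concrete SecretBase implementation for inline secrets:

    from azure.synapse.artifacts import models

    quickbooks = models.QuickBooksLinkedService(
        endpoint="quickbooks.api.intuit.com",
        company_id="1234567890",
        consumer_key="<consumer-key>",
        consumer_secret=models.SecureString(value="<consumer-secret>"),  # SecureString assumed
        access_token=models.SecureString(value="<access-token>"),
        access_token_secret=models.SecureString(value="<access-token-secret>"),
        use_encrypted_endpoints=True,
    )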
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(QuickBooksObjectDataset, self).__init__(**kwargs) + self.type = 'QuickBooksObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class QuickBooksSource(TabularSource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(QuickBooksSource, self).__init__(**kwargs) + self.type = 'QuickBooksSource' # type: str + self.query = kwargs.get('query', None) + + +class RecurrenceSchedule(msrest.serialization.Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
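A dataset/source pair for the QuickBooks models above might look as follows; LinkedServiceReference and its reference_name keyword are assumed from the wider models module:

    from azure.synapse.artifacts import models

    dataset = models.QuickBooksObjectDataset(
        linked_service_name=models.LinkedServiceReference(
            reference_name="MyQuickBooksLinkedService",  # assumed keyword
        ),
        table_name="Invoices",
    )
    source = models.QuickBooksSource(query="SELECT * FROM Invoices")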
+ :type monthly_occurrences: list[~azure.synapse.artifacts.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[str]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, } + def __init__( + self, + **kwargs + ): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + self.month_days = kwargs.get('month_days', None) + self.monthly_occurrences = kwargs.get('monthly_occurrences', None) + + +class RecurrenceScheduleOccurrence(msrest.serialization.Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + "Wednesday", "Thursday", "Friday", "Saturday". + :type day: str or ~azure.synapse.artifacts.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'day': {'key': 'day', 'type': 'str'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, } def __init__( self, **kwargs ): - super(ODataLinkedService, self).__init__(**kwargs) - self.type = 'OData' - self.url = kwargs['url'] - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.tenant = kwargs.get('tenant', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) - self.aad_service_principal_credential_type = 
kwargs.get('aad_service_principal_credential_type', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) - self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.day = kwargs.get('day', None) + self.occurrence = kwargs.get('occurrence', None) -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. +class RedirectIncompatibleRowSettings(msrest.serialization.Model): + """Redirect incompatible row settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: The OData resource path. Type: string (or Expression with resultType string). + :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data + Lake Store linked service used for redirecting incompatible row. Must be specified if + redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType + string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. Type: string (or + Expression with resultType string). 
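Every field on the RecurrenceSchedule and RecurrenceScheduleOccurrence models above is optional, so sparse schedules are easy to express. A sketch describing 06:00 on the third Friday of each month:

    from azure.synapse.artifacts import models

    # day accepts the string form of the DayOfWeek enum values.
    schedule = models.RecurrenceSchedule(
        minutes=[0],
        hours=[6],
        monthly_occurrences=[
            models.RecurrenceScheduleOccurrence(day="Friday", occurrence=3),
        ],
    )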
:type path: object """ _validation = { - 'type': {'required': True}, 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ODataResourceDataset, self).__init__(**kwargs) - self.type = 'ODataResource' + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs['linked_service_name'] self.path = kwargs.get('path', None) -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. +class RedshiftUnloadSettings(msrest.serialization.Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be + used for the unload operation when copying from the Amazon Redshift source. + :type s3_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store + the unloaded data from Amazon Redshift source. The bucket must be in the same region as the + Amazon Redshift source. Type: string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = kwargs['s3_linked_service_name'] + self.bucket_name = kwargs['bucket_name'] + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
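For the RedshiftUnloadSettings model above, both properties are required, and the staging bucket must share the Redshift source's region. A sketch, again assuming LinkedServiceReference's reference_name keyword:

    from azure.synapse.artifacts import models

    unload = models.RedshiftUnloadSettings(
        s3_linked_service_name=models.LinkedServiceReference(
            reference_name="InterimS3",  # assumed keyword
        ),
        bucket_name="redshift-unload-staging",  # same region as the Redshift source
    )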
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the ODBC data store. - Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(OdbcLinkedService, self).__init__(**kwargs) - self.type = 'Odbc' - self.connection_string = kwargs['connection_string'] - self.authentication_type = kwargs.get('authentication_type', None) - self.credential = kwargs.get('credential', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(RelationalSource, self).__init__(**kwargs) + self.type = 'RelationalSource' # type: str + self.query = kwargs.get('query', None) -class OdbcTableDataset(Dataset): - """The ODBC table dataset. +class RelationalTableDataset(Dataset): + """The relational table dataset. All required parameters must be populated in order to send to Azure. @@ -12637,7 +22679,8 @@ class OdbcTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The ODBC table name. Type: string (or Expression with resultType string). + :param table_name: The relational table name. Type: string (or Expression with resultType + string). :type table_name: object """ @@ -12663,205 +22706,193 @@ def __init__( self, **kwargs ): - super(OdbcTableDataset, self).__init__(**kwargs) - self.type = 'OdbcTable' + super(RelationalTableDataset, self).__init__(**kwargs) + self.type = 'RelationalTable' # type: str self.table_name = kwargs.get('table_name', None) -class Office365Dataset(Dataset): - """The Office365 account. +class RerunTriggerListResponse(msrest.serialization.Model): + """A list of rerun triggers. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
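The RelationalSource above requires only its type constant, which the constructor sets itself; everything else is optional. A sketch with the retry knobs spelled out using the timespan pattern the docstring gives:

    from azure.synapse.artifacts import models

    source = models.RelationalSource(
        query="SELECT * FROM dbo.Orders",
        source_retry_count=3,
        source_retry_wait="00:00:30",  # matches the documented timespan pattern
        max_concurrent_connections=4,
    )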
Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or - Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the specific rows to - extract from Office 365. Type: string (or Expression with resultType string). - :type predicate: object + :param value: Required. List of rerun triggers. + :type value: list[~azure.synapse.artifacts.models.RerunTriggerResource] + :ivar next_link: The continuation token for getting the next page of results, if any remaining + results exist, null otherwise. + :vartype next_link: str + """ + + _validation = { + 'value': {'required': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[RerunTriggerResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(RerunTriggerListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = None + + +class RerunTriggerResource(AzureEntityResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. 
+ :type properties: ~azure.synapse.artifacts.models.RerunTumblingWindowTrigger """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, } def __init__( self, **kwargs ): - super(Office365Dataset, self).__init__(**kwargs) - self.type = 'Office365Table' - self.table_name = kwargs['table_name'] - self.predicate = kwargs.get('predicate', None) + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] -class Office365LinkedService(LinkedService): - """Office365 linked service. +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Trigger description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. - Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant information under which your - Azure AD web application resides. Type: string (or Expression with resultType string). 
- :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client ID. Type: string (or - Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period for which restatement + is initiated. Only UTC time is currently supported. + :type requested_start_time: ~datetime.datetime + :param requested_end_time: Required. The end time for the time period for which restatement is + initiated. Only UTC time is currently supported. + :type requested_end_time: ~datetime.datetime + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a rerun is triggered. + :type max_concurrency: int """ _validation = { 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, + 'runtime_state': {'readonly': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, } def __init__( self, **kwargs ): - super(Office365LinkedService, self).__init__(**kwargs) - self.type = 'Office365' - self.office365_tenant_id = kwargs['office365_tenant_id'] - self.service_principal_tenant_id = kwargs['service_principal_tenant_id'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(RerunTumblingWindowTrigger, self).__init__(**kwargs) + self.type = 'RerunTumblingWindowTrigger' # type: str + self.parent_trigger = 
kwargs.get('parent_trigger', None) + self.requested_start_time = kwargs['requested_start_time'] + self.requested_end_time = kwargs['requested_end_time'] + self.max_concurrency = kwargs['max_concurrency'] -class OracleLinkedService(LinkedService): - """Oracle database. +class RerunTumblingWindowTriggerActionParameters(msrest.serialization.Model): + """Rerun tumbling window trigger Parameters. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param start_time: Required. The start time for the time period for which restatement is + initiated. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: Required. The end time for the time period for which restatement is initiated. + Only UTC time is currently supported. + :type end_time: ~datetime.datetime + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a rerun is triggered. 
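The rerun trigger above requires a UTC restatement window plus a max_concurrency between 1 and 50, per its _validation map. A construction sketch:

    import datetime
    from azure.synapse.artifacts import models

    trigger = models.RerunTumblingWindowTrigger(
        requested_start_time=datetime.datetime(2020, 9, 1, tzinfo=datetime.timezone.utc),
        requested_end_time=datetime.datetime(2020, 9, 2, tzinfo=datetime.timezone.utc),
        max_concurrency=10,  # must sit in the declared 1-50 range
    )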
+ :type max_concurrency: int """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, } def __init__( self, **kwargs ): - super(OracleLinkedService, self).__init__(**kwargs) - self.type = 'Oracle' - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = kwargs['start_time'] + self.end_time = kwargs['end_time'] + self.max_concurrency = kwargs['max_concurrency'] -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. +class ResponsysLinkedService(LinkedService): + """Responsys linked service. All required parameters must be populated in order to send to Azure. @@ -12878,13 +22909,14 @@ class OracleServiceCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object - :param username: Required. The user name that you use to access Oracle Service Cloud server. - :type username: object - :param password: Required. The password corresponding to the user name that you provided in the - username key. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param endpoint: Required. The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys application. Type: + string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Responsys application. Type: string + (or Expression with resultType string). + :type client_secret: ~azure.synapse.artifacts.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). 
:type use_encrypted_endpoints: object @@ -12904,9 +22936,8 @@ class OracleServiceCloudLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -12916,9 +22947,9 @@ class OracleServiceCloudLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, @@ -12929,19 +22960,19 @@ def __init__( self, **kwargs ): - super(OracleServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'OracleServiceCloud' - self.host = kwargs['host'] - self.username = kwargs['username'] - self.password = kwargs['password'] + super(ResponsysLinkedService, self).__init__(**kwargs) + self.type = 'Responsys' # type: str + self.endpoint = kwargs['endpoint'] + self.client_id = kwargs['client_id'] + self.client_secret = kwargs.get('client_secret', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) self.use_host_verification = kwargs.get('use_host_verification', None) self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. +class ResponsysObjectDataset(Dataset): + """Responsys dataset. All required parameters must be populated in order to send to Azure. @@ -12993,13 +23024,63 @@ def __init__( self, **kwargs ): - super(OracleServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'OracleServiceCloudObject' + super(ResponsysObjectDataset, self).__init__(**kwargs) + self.type = 'ResponsysObject' # type: str self.table_name = kwargs.get('table_name', None) -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. +class ResponsysSource(TabularSource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
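On the ResponsysLinkedService above, only endpoint and client_id are required; client_secret stays optional. A sketch with a hypothetical endpoint, again assuming SecureString as the SecretBase implementation:

    from azure.synapse.artifacts import models

    responsys = models.ResponsysLinkedService(
        endpoint="https://<instance>.responsys.example",  # hypothetical endpoint
        client_id="<application-client-id>",
        client_secret=models.SecureString(value="<application-client-secret>"),  # SecureString assumed
    )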
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ResponsysSource, self).__init__(**kwargs) + self.type = 'ResponsysSource' # type: str + self.query = kwargs.get('query', None) + + +class RestResourceDataset(Dataset): + """A Rest service dataset. All required parameters must be populated in order to send to Azure. @@ -13025,15 +23106,21 @@ class OracleTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: + :param relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the on-premises Oracle database. Type: string (or Expression - with resultType string). - :type table: object + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). 
+ :type pagination_rules: object """ _validation = { @@ -13051,178 +23138,341 @@ class OracleTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, } def __init__( self, **kwargs ): - super(OracleTableDataset, self).__init__(**kwargs) - self.type = 'OracleTable' - self.table_name = kwargs.get('table_name', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(RestResourceDataset, self).__init__(**kwargs) + self.type = 'RestResource' # type: str + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) -class OrcDataset(Dataset): - """ORC dataset. +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the ORC data storage. 
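The RestResourceDataset above needs only a linked service reference; the REST-specific properties are optional strings (or expressions). A sketch:

    from azure.synapse.artifacts import models

    rest_dataset = models.RestResourceDataset(
        linked_service_name=models.LinkedServiceReference(
            reference_name="MyRestService",  # assumed keyword
        ),
        relative_url="/v1/orders",
        request_method="GET",
    )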
- :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". - :type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server side SSL certificate + when connecting to the endpoint.The default value is true. Type: boolean (or Expression with + resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to connect to the REST + service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", + "ManagedServiceIdentity". + :type authentication_type: str or ~azure.synapse.artifacts.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_id: The application's client ID used in AadServicePrincipal + authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in AadServicePrincipal authentication + type. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal + authentication type under which your application resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(OrcDataset, self).__init__(**kwargs) - self.type = 'Orc' - self.location = kwargs.get('location', None) - self.orc_compression_codec = kwargs.get('orc_compression_codec', None) + super(RestServiceLinkedService, self).__init__(**kwargs) + self.type = 'RestService' # type: str + self.url = kwargs['url'] + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs['authentication_type'] + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class ParameterSpecification(msrest.serialization.Model): - """Definition of a single parameter for an entity. +class RestSource(CopySource): + """A copy activity Rest service source. All required parameters must be populated in order to send to Azure. - :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", - "Float", "Bool", "Array", "SecureString". - :type type: str or ~azure.synapse.artifacts.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
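The RestServiceLinkedService above requires url and authentication_type; which credential fields apply depends on the chosen type. A Basic-auth sketch, with SecureString assumed for the password:

    from azure.synapse.artifacts import models

    rest_service = models.RestServiceLinkedService(
        url="https://api.example.com",
        authentication_type="Basic",
        user_name="svc-user",
        password=models.SecureString(value="<password>"),  # SecureString assumed
    )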
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to wait before sending the next page request. + :type request_interval: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(RestSource, self).__init__(**kwargs) + self.type = 'RestSource' # type: str + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + + +class RetryPolicy(msrest.serialization.Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default is 30.
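+ Minimum: 30, maximum: 86400.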
+ :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(RetryPolicy, self).__init__(**kwargs) + self.count = kwargs.get('count', None) + self.interval_in_seconds = kwargs.get('interval_in_seconds', None) + + +class RunFilterParameters(msrest.serialization.Model): + """Query parameters for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param continuation_token: The continuation token for getting the next page of results. Null + for first page. + :type continuation_token: str + :param last_updated_after: Required. The time at or after which the run event was updated in + 'ISO 8601' format. + :type last_updated_after: ~datetime.datetime + :param last_updated_before: Required. The time at or before which the run event was updated in + 'ISO 8601' format. + :type last_updated_before: ~datetime.datetime + :param filters: List of filters. + :type filters: list[~azure.synapse.artifacts.models.RunQueryFilter] + :param order_by: List of OrderBy option. + :type order_by: list[~azure.synapse.artifacts.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__( + self, + **kwargs + ): + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.last_updated_after = kwargs['last_updated_after'] + self.last_updated_before = kwargs['last_updated_before'] + self.filters = kwargs.get('filters', None) + self.order_by = kwargs.get('order_by', None) + + +class RunQueryFilter(msrest.serialization.Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The allowed operands to query + pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger + runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", + "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", + "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". + :type operand: str or ~azure.synapse.artifacts.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values include: "Equals", + "NotEquals", "In", "NotIn". + :type operator: str or ~azure.synapse.artifacts.models.RunQueryFilterOperator + :param values: Required. List of filter values. 
+ :type values: list[str] """ _validation = { - 'type': {'required': True}, + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, } def __init__( self, **kwargs ): - super(ParameterSpecification, self).__init__(**kwargs) - self.type = kwargs['type'] - self.default_value = kwargs.get('default_value', None) + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = kwargs['operand'] + self.operator = kwargs['operator'] + self.values = kwargs['values'] -class ParquetDataset(Dataset): - """Parquet dataset. +class RunQueryOrderBy(msrest.serialization.Model): + """An object to provide order by options for listing runs. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the parquet storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". - :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec + :param order_by: Required. Parameter name to be used for order by. The allowed parameters to + order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, + TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", + "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", + "TriggerRunTimestamp". + :type order_by: str or ~azure.synapse.artifacts.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". 
+ :type order: str or ~azure.synapse.artifacts.models.RunQueryOrder """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'order_by': {'required': True}, + 'order': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'order_by': {'key': 'orderBy', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, } def __init__( self, **kwargs ): - super(ParquetDataset, self).__init__(**kwargs) - self.type = 'Parquet' - self.location = kwargs.get('location', None) - self.compression_codec = kwargs.get('compression_codec', None) + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = kwargs['order_by'] + self.order = kwargs['order'] -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. +class SalesforceLinkedService(LinkedService): + """Linked service for Salesforce. All required parameters must be populated in order to send to Azure. @@ -13239,22 +23489,18 @@ class PaypalLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :type host: object - :param client_id: Required. The client ID associated with your PayPal application. - :type client_id: object - :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param environment_url: The URL of Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce instance. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param security_token: The security token required to remotely access the Salesforce instance. + :type security_token: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -13263,8 +23509,6 @@ class PaypalLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { @@ -13274,12 +23518,10 @@ class PaypalLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -13287,127 +23529,50 @@ def __init__( self, **kwargs ): - super(PaypalLinkedService, self).__init__(**kwargs) - self.type = 'Paypal' - self.host = kwargs['host'] - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) + super(SalesforceLinkedService, self).__init__(**kwargs) + self.type = 'Salesforce' # type: str + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference.
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PaypalObjectDataset, self).__init__(**kwargs) - self.type = 'PaypalObject' - self.table_name = kwargs.get('table_name', None) - - -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Phoenix server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Phoenix server uses to listen for client connections. The - default value is 8765. - :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. (i.e. - /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using - WindowsAzureHDInsightService. - :type http_path: object - :param authentication_type: Required. The authentication mechanism used to connect to the - Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: object - :param password: The password corresponding to the user name. 
- :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_id: Required. The client ID associated with the Salesforce Marketing Cloud + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce Marketing Cloud + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -13416,8 +23581,7 @@ class PhoenixLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -13427,17 +23591,11 @@ class PhoenixLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -13445,24 +23603,18 @@ def __init__( self, **kwargs ): - super(PhoenixLinkedService, self).__init__(**kwargs) - self.type = 'Phoenix' - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.http_path = kwargs.get('http_path', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) + self.type = 'SalesforceMarketingCloud' # type: str + self.client_id = kwargs['client_id'] + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. All required parameters must be populated in order to send to Azure. @@ -13488,15 +23640,8 @@ class PhoenixObjectDataset(Dataset): :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -13515,571 +23660,499 @@ class PhoenixObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PhoenixObjectDataset, self).__init__(**kwargs) - self.type = 'PhoenixObject' + super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) + self.type = 'SalesforceMarketingCloudObject' # type: str self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class PipelineFolder(msrest.serialization.Model): - """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - - :param name: The name of the folder that this Pipeline is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineFolder, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - - -class PipelineListResponse(msrest.serialization.Model): - """A list of pipeline resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of pipelines. - :type value: list[~azure.synapse.artifacts.models.PipelineResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PipelineListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) - -class PipelineReference(msrest.serialization.Model): - """Pipeline reference type. - Variables are only populated by the server, and will be ignored when sending a request. +class SalesforceMarketingCloudSource(TabularSource): + """A copy activity Salesforce Marketing Cloud source. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". - :vartype type: str - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. - :type name: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } - type = "PipelineReference" - def __init__( self, **kwargs ): - super(PipelineReference, self).__init__(**kwargs) - self.reference_name = kwargs['reference_name'] - self.name = kwargs.get('name', None) + super(SalesforceMarketingCloudSource, self).__init__(**kwargs) + self.type = 'SalesforceMarketingCloudSource' # type: str + self.query = kwargs.get('query', None) -class PipelineResource(SubResource): - """Pipeline resource type. +class SalesforceObjectDataset(Dataset): + """The Salesforce object dataset. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param description: The description of the pipeline. + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.synapse.artifacts.models.Activity] - :param parameters: List of parameters for pipeline. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, ~azure.synapse.artifacts.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the Pipeline. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] - :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.PipelineFolder + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param object_api_name: The Salesforce object API name. Type: string (or Expression with + resultType string). + :type object_api_name: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'concurrency': {'minimum': 1}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, - 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, - 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, - 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, - 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, - 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PipelineResource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.activities = kwargs.get('activities', None) - self.parameters = kwargs.get('parameters', None) - self.variables = kwargs.get('variables', None) - self.concurrency = kwargs.get('concurrency', None) - self.annotations = kwargs.get('annotations', None) - self.run_dimensions = kwargs.get('run_dimensions', None) - self.folder = kwargs.get('folder', None) + super(SalesforceObjectDataset, self).__init__(**kwargs) + self.type = 'SalesforceObject' # type: str + 
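# objectApiName is optional; it is serialized under typeProperties.objectApiName per the _attribute_map above. +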
self.object_api_name = kwargs.get('object_api_name', None) -class PipelineRun(msrest.serialization.Model): - """Information about a pipeline run. +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :ivar run_id: Identifier of a run. - :vartype run_id: str - :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. - :vartype run_group_id: str - :ivar is_latest: Indicates if the recovered pipeline run is the latest in its group. - :vartype is_latest: bool - :ivar pipeline_name: The pipeline name. - :vartype pipeline_name: str - :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline - run. - :vartype parameters: dict[str, str] - :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~azure.synapse.artifacts.models.PipelineRunInvokedBy - :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. - :vartype last_updated: ~datetime.datetime - :ivar run_start: The start time of a pipeline run in ISO8601 format. - :vartype run_start: ~datetime.datetime - :ivar run_end: The end time of a pipeline run in ISO8601 format. - :vartype run_end: ~datetime.datetime - :ivar duration_in_ms: The duration of a pipeline run. - :vartype duration_in_ms: int - :ivar status: The status of a pipeline run. - :vartype status: str - :ivar message: The message from a pipeline run. - :vartype message: str + :param type: Required. Type of linked service. Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param environment_url: The URL of Salesforce Service Cloud instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param security_token: The security token required to remotely access the Salesforce instance. + :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param extended_properties: Extended properties appended to the connection string. Type: string + (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager.
Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'run_id': {'readonly': True}, - 'run_group_id': {'readonly': True}, - 'is_latest': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'parameters': {'readonly': True}, - 'invoked_by': {'readonly': True}, - 'last_updated': {'readonly': True}, - 'run_start': {'readonly': True}, - 'run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, - 'is_latest': {'key': 'isLatest', 'type': 'bool'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{str}'}, - 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, - 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, - 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, - 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PipelineRun, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.run_id = None - self.run_group_id = None - self.is_latest = None - self.pipeline_name = None - self.parameters = None - self.invoked_by = None - self.last_updated = None - self.run_start = None - self.run_end = None - self.duration_in_ms = None - self.status = None - self.message = None + super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloud' # type: str + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PipelineRunInvokedBy(msrest.serialization.Model): - """Provides entity name and id that started the pipeline run. +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar name: Name of the entity that started the pipeline run. - :vartype name: str - :ivar id: The ID of the entity that started the run. 
- :vartype id: str - :ivar invoked_by_type: The type of the entity that started the run. - :vartype invoked_by_type: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or + Expression with resultType string). + :type object_api_name: object """ _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'invoked_by_type': {'readonly': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PipelineRunInvokedBy, self).__init__(**kwargs) - self.name = None - self.id = None - self.invoked_by_type = None + super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloudObject' # type: str + self.object_api_name = kwargs.get('object_api_name', None) -class PipelineRunsQueryResponse(msrest.serialization.Model): - """A list pipeline runs. +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. All required parameters must be populated in order to send to Azure. - :param value: Required. List of pipeline runs. - :type value: list[~azure.synapse.artifacts.models.PipelineRun] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy sink type. Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + include: "Insert", "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore null values from the + input dataset (except key fields) during a write operation. Default value is false. When true, + ADF leaves the data in the destination object unchanged during upsert/update operations and + inserts the defined default value during insert operations; when false, ADF updates the data in + the destination object to NULL during upsert/update operations and inserts NULL values during + insert operations. Type: boolean (or Expression with resultType boolean).
+ :type ignore_null_values: object """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PipelineRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.continuation_token = kwargs.get('continuation_token', None) + super(SalesforceServiceCloudSink, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloudSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) -class PostgreSqlLinkedService(LinkedService): - """Linked service for PostgreSQL data source. +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". + :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__( self, **kwargs ): - super(PostgreSqlLinkedService, self).__init__(**kwargs) - self.type = 'PostgreSql' - self.connection_string = kwargs['connection_string'] - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SalesforceServiceCloudSource, self).__init__(**kwargs) + self.type = 'SalesforceServiceCloudSource' # type: str + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) -class PostgreSqlTableDataset(Dataset): - """The PostgreSQL table dataset. +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. 
Please consider using schema + table
- properties instead.
- :type table_name: object
- :param table: The PostgreSQL table name. Type: string (or Expression with resultType string).
- :type table: object
- :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression
- with resultType string).
- :type schema_type_properties_schema: object
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param write_behavior: The write behavior for the operation. Default is Insert. Possible values
+ include: "Insert", "Upsert".
+ :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior
+ :param external_id_field_name: The name of the external ID field for the upsert operation. The
+ default value is the 'Id' column. Type: string (or Expression with resultType string).
+ :type external_id_field_name: object
+ :param ignore_null_values: The flag indicating whether to ignore null values from the input
+ dataset (except key fields) during a write operation. Default value is false. If set to true,
+ ADF leaves the data in the destination object unchanged when doing an upsert/update operation
+ and inserts the defined default value when doing an insert operation; if set to false, ADF
+ updates the data in the destination object to NULL when doing an upsert/update operation and
+ inserts NULL when doing an insert operation. Type: boolean (or Expression with resultType
+ boolean).
+ :type ignore_null_values: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } def __init__( self, **kwargs ): - super(PostgreSqlTableDataset, self).__init__(**kwargs) - self.type = 'PostgreSqlTable' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + super(SalesforceSink, self).__init__(**kwargs) + self.type = 'SalesforceSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) -class PrestoLinkedService(LinkedService): - """Presto server linked service. +class SalesforceSource(TabularSource): + """A copy activity Salesforce source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Presto server. (i.e. - 192.168.222.160). - :type host: object - :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :type server_version: object - :param catalog: Required. The catalog context for all request against the server. - :type catalog: object - :param port: The TCP port that the Presto server uses to listen for client connections. The - default value is 8080. 
- :type port: object - :param authentication_type: Required. The authentication mechanism used to connect to the - Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or ~azure.synapse.artifacts.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid values for this option - are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". 
+ :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__( self, **kwargs ): - super(PrestoLinkedService, self).__init__(**kwargs) - self.type = 'Presto' - self.host = kwargs['host'] - self.server_version = kwargs['server_version'] - self.catalog = kwargs['catalog'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.time_zone_id = kwargs.get('time_zone_id', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SalesforceSource, self).__init__(**kwargs) + self.type = 'SalesforceSource' # type: str + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) -class PrestoObjectDataset(Dataset): - """Presto server dataset. +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. All required parameters must be populated in order to send to Azure. 
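As a quick orientation for reviewers, here is a minimal sketch of how the regenerated Salesforce copy models above might be constructed. It is illustrative only: the kwargs-based constructors and field names come from this diff, while the SOQL query and the External_Id__c column are hypothetical placeholders.

from azure.synapse.artifacts import models

# Source: 'query' carries the SOQL text; 'read_behavior' switches between
# "Query" (the default) and "QueryAll", which also returns deleted rows.
source = models.SalesforceSource(
    query="SELECT Id, Name FROM Account",  # hypothetical query
    read_behavior="QueryAll",
)

# Sink configured for upsert: 'external_id_field_name' names the external ID
# column, and ignore_null_values=True keeps destination values unchanged when
# the corresponding input field is NULL.
sink = models.SalesforceSink(
    write_behavior="Upsert",
    external_id_field_name="External_Id__c",  # hypothetical column
    ignore_null_values=True,
)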
@@ -14105,15 +24178,6 @@ class PrestoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Presto. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -14131,47 +24195,18 @@ class PrestoObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(PrestoObjectDataset, self).__init__(**kwargs) - self.type = 'PrestoObject' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - - -class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): - """A list of active debug sessions. - - :param value: Array with all active debug sessions. - :type value: list[~azure.synapse.artifacts.models.DataFlowDebugSessionInfo] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): - super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) + super(SapBwCubeDataset, self).__init__(**kwargs) + self.type = 'SapBwCube' # type: str -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. All required parameters must be populated in order to send to Azure. @@ -14188,36 +24223,31 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com). - :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. - :type consumer_secret: ~azure.synapse.artifacts.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 authentication. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. 
- :type access_token_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with + resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a two-digit decimal + number represented as a string.) Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit + decimal number represented as a string) Type: string (or Expression with resultType string). + :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ - _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -14227,13 +24257,11 @@ class QuickBooksLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14241,116 +24269,130 @@ def __init__( self, **kwargs ): - super(QuickBooksLinkedService, self).__init__(**kwargs) - self.type = 'QuickBooks' - self.endpoint = kwargs['endpoint'] - self.company_id = kwargs['company_id'] - self.consumer_key = kwargs['consumer_key'] - self.consumer_secret = kwargs['consumer_secret'] - self.access_token = kwargs['access_token'] - self.access_token_secret = kwargs['access_token_secret'] - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + 
super(SapBWLinkedService, self).__init__(**kwargs) + self.type = 'SapBW' # type: str + self.server = kwargs['server'] + self.system_number = kwargs['system_number'] + self.client_id = kwargs['client_id'] + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. +class SapBwSource(TabularSource): + """A copy activity source for SapBW server via MDX. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: MDX query. Type: string (or Expression with resultType string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(QuickBooksObjectDataset, self).__init__(**kwargs) - self.type = 'QuickBooksObject' - self.table_name = kwargs.get('table_name', None) + super(SapBwSource, self).__init__(**kwargs) + self.type = 'SapBwSource' # type: str + self.query = kwargs.get('query', None) -class RedirectIncompatibleRowSettings(msrest.serialization.Model): - """Redirect incompatible row settings. +class SapCloudForCustomerLinkedService(LinkedService): + """Linked service for SAP Cloud for Customer. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data - Lake Store linked service used for redirecting incompatible row. Must be specified if - redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType - string). - :type linked_service_name: object - :param path: The path for storing the redirect incompatible row data. Type: string (or - Expression with resultType string). - :type path: object + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The URL of SAP Cloud for Customer OData API. For example, + '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with + resultType string). + :type url: object + :param username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :type username: object + :param password: The password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. 
Either encryptedCredential or + username/password must be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: object """ _validation = { - 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.linked_service_name = kwargs['linked_service_name'] - self.path = kwargs.get('path', None) + super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) + self.type = 'SapCloudForCustomer' # type: str + self.url = kwargs['url'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class RelationalTableDataset(Dataset): - """The relational table dataset. +class SapCloudForCustomerResourceDataset(Dataset): + """The path of the SAP Cloud for Customer OData entity. All required parameters must be populated in order to send to Azure. @@ -14376,14 +24418,15 @@ class RelationalTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The relational table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or + Expression with resultType string). + :type path: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'path': {'required': True}, } _attribute_map = { @@ -14396,246 +24439,298 @@ class RelationalTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__( self, **kwargs ): - super(RelationalTableDataset, self).__init__(**kwargs) - self.type = 'RelationalTable' - self.table_name = kwargs.get('table_name', None) - + super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) + self.type = 'SapCloudForCustomerResource' # type: str + self.path = kwargs['path'] -class RerunTriggerListResponse(msrest.serialization.Model): - """A list of rerun triggers. - Variables are only populated by the server, and will be ignored when sending a request. 
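Likewise, a minimal sketch pairing the SAP Cloud for Customer linked service with its resource dataset, both regenerated above. The tenant URL, reference name, and entity path are placeholders, and the SecureString and LinkedServiceReference helpers are assumed from elsewhere in the models package rather than from this hunk.

from azure.synapse.artifacts import models

# Linked service: only 'url' is required; basic credentials are optional and
# the password may be any SecretBase subtype.
c4c_linked_service = models.SapCloudForCustomerLinkedService(
    url="https://mytenant.crm.ondemand.com/sap/c4c/odata/v1",  # placeholder
    username="api-user",                                       # placeholder
    password=models.SecureString(value="<secret>"),  # assumed SecretBase subtype
)

# Resource dataset: 'linked_service_name' and 'path' are both required per
# the _validation map above.
c4c_dataset = models.SapCloudForCustomerResourceDataset(
    linked_service_name=models.LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="SapC4C",  # placeholder reference name
    ),
    path="LeadCollection",  # placeholder OData entity path
)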
+class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. All required parameters must be populated in order to send to Azure. - :param value: Required. List of rerun triggers. - :type value: list[~azure.synapse.artifacts.models.RerunTriggerResource] - :ivar next_link: The continuation token for getting the next page of results, if any remaining - results exist, null otherwise. - :vartype next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + values include: "Insert", "Update". + :type write_behavior: str or + ~azure.synapse.artifacts.models.SapCloudForCustomerSinkWriteBehavior """ _validation = { - 'value': {'required': True}, - 'next_link': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[RerunTriggerResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__( self, **kwargs ): - super(RerunTriggerListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = None - + super(SapCloudForCustomerSink, self).__init__(**kwargs) + self.type = 'SapCloudForCustomerSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class SapCloudForCustomerSource(TabularSource): + """A copy activity source for SAP Cloud for Customer source. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. 
- :type properties: ~azure.synapse.artifacts.models.RerunTumblingWindowTrigger + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(SapCloudForCustomerSource, self).__init__(**kwargs) + self.type = 'SapCloudForCustomerSource' # type: str + self.query = kwargs.get('query', None) -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. - - Variables are only populated by the server, and will be ignored when sending a request. +class SapEccLinkedService(LinkedService): + """Linked service for SAP ERP Central Component(SAP ECC). All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Trigger description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
- :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param parent_trigger: The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period for which restatement - is initiated. Only UTC time is currently supported. - :type requested_start_time: ~datetime.datetime - :param requested_end_time: Required. The end time for the time period for which restatement is - initiated. Only UTC time is currently supported. - :type requested_end_time: ~datetime.datetime - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a rerun is triggered. - :type max_concurrency: int + :param url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with + resultType string). + :type url: str + :param username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :type username: str + :param password: The password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Either encryptedCredential or + username/password must be provided. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: str """ _validation = { 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'url': {'key': 'typeProperties.url', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'str'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, **kwargs ): - super(RerunTumblingWindowTrigger, self).__init__(**kwargs) - self.type = 'RerunTumblingWindowTrigger' - self.parent_trigger = kwargs.get('parent_trigger', None) - self.requested_start_time = kwargs['requested_start_time'] - self.requested_end_time = kwargs['requested_end_time'] - self.max_concurrency = kwargs['max_concurrency'] + super(SapEccLinkedService, self).__init__(**kwargs) + self.type = 'SapEcc' # type: str + self.url = kwargs['url'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class RerunTumblingWindowTriggerActionParameters(msrest.serialization.Model): - """Rerun tumbling window trigger Parameters. +class SapEccResourceDataset(Dataset): + """The path of the SAP ECC OData entity. All required parameters must be populated in order to send to Azure. - :param start_time: Required. The start time for the time period for which restatement is - initiated. Only UTC time is currently supported. - :type start_time: ~datetime.datetime - :param end_time: Required. The end time for the time period for which restatement is initiated. - Only UTC time is currently supported. - :type end_time: ~datetime.datetime - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a rerun is triggered. - :type max_concurrency: int + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with + resultType string). + :type path: object """ _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'path': {'required': True}, } _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__( self, **kwargs ): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = kwargs['start_time'] - self.end_time = kwargs['end_time'] - self.max_concurrency = kwargs['max_concurrency'] + super(SapEccResourceDataset, self).__init__(**kwargs) + self.type = 'SapEccResource' # type: str + self.path = kwargs['path'] -class Resource(msrest.serialization.Model): - """Azure Synapse top-level resource. +class SapEccSource(TabularSource): + """A copy activity source for SAP ECC source. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + resultType string). + :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.e_tag = None + super(SapEccSource, self).__init__(**kwargs) + self.type = 'SapEccSource' # type: str + self.query = kwargs.get('query', None) -class ResponsysLinkedService(LinkedService): - """Responsys linked service. +class SapHanaLinkedService(LinkedService): + """SAP HANA Linked Service. All required parameters must be populated in order to send to Azure. @@ -14652,25 +24747,20 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys application. Type: - string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Responsys application. Type: string - (or Expression with resultType string). - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object + :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. 
Host name of the SAP HANA server. Type: string (or Expression with + resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect to the SAP HANA + server. Possible values include: "Basic", "Windows". + :type authentication_type: str or ~azure.synapse.artifacts.models.SapHanaAuthenticationType + :param user_name: Username to access the SAP HANA server. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -14679,8 +24769,7 @@ class ResponsysLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, + 'server': {'required': True}, } _attribute_map = { @@ -14690,12 +24779,11 @@ class ResponsysLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14703,77 +24791,102 @@ def __init__( self, **kwargs ): - super(ResponsysLinkedService, self).__init__(**kwargs) - self.type = 'Responsys' - self.endpoint = kwargs['endpoint'] - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) + super(SapHanaLinkedService, self).__init__(**kwargs) + self.type = 'SapHana' # type: str + self.connection_string = kwargs.get('connection_string', None) + self.server = kwargs['server'] + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class ResponsysObjectDataset(Dataset): - """Responsys dataset. +class SapHanaPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP HANA source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. 
Type: string (or Expression with resultType string). + :type partition_column_name: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapHanaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + + +class SapHanaSource(TabularSource): + """A copy activity source for SAP HANA source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression + with resultType integer). + :type packet_size: object + :param partition_option: The partition mechanism that will be used for SAP HANA read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.SapHanaPartitionOption + :param partition_settings: The settings that will be leveraged for SAP HANA source + partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SapHanaPartitionSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } def __init__( self, **kwargs ): - super(ResponsysObjectDataset, self).__init__(**kwargs) - self.type = 'ResponsysObject' - self.table_name = kwargs.get('table_name', None) + super(SapHanaSource, self).__init__(**kwargs) + self.type = 'SapHanaSource' # type: str + self.query = kwargs.get('query', None) + self.packet_size = kwargs.get('packet_size', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) -class RestResourceDataset(Dataset): - """A Rest service dataset. +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. All required parameters must be populated in order to send to Azure. @@ -14799,21 +24912,11 @@ class RestResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param relative_url: The relative URL to the resource that the RESTful API provides. Type: - string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: - string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: - string (or Expression with resultType string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: - string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page requests. Type: string (or - Expression with resultType string). - :type pagination_rules: object + :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression + with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). 
+ :type table: object """ _validation = { @@ -14831,28 +24934,22 @@ class RestResourceDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, **kwargs ): - super(RestResourceDataset, self).__init__(**kwargs) - self.type = 'RestResource' - self.relative_url = kwargs.get('relative_url', None) - self.request_method = kwargs.get('request_method', None) - self.request_body = kwargs.get('request_body', None) - self.additional_headers = kwargs.get('additional_headers', None) - self.pagination_rules = kwargs.get('pagination_rules', None) + super(SapHanaTableDataset, self).__init__(**kwargs) + self.type = 'SapHanaTable' # type: str + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) -class RestServiceLinkedService(LinkedService): - """Rest Service linked service. +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. All required parameters must be populated in order to send to Azure. @@ -14869,31 +24966,26 @@ class RestServiceLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. The base URL of the REST service. - :type url: object - :param enable_server_certificate_validation: Whether to validate server side SSL certificate - when connecting to the endpoint.The default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to connect to the REST - service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", - "ManagedServiceIdentity". - :type authentication_type: str or ~azure.synapse.artifacts.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. + :param server: Required. Host name of the SAP BW instance where the open hub destination is + located. Type: string (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system where the open hub destination + is located. (Usually a two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where the open hub + destination is located. (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). + :type client_id: object + :param language: Language of the BW system where the open hub destination is located. The + default value is EN. 
Type: string (or Expression with resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub destination is + located. Type: string (or Expression with resultType string). :type user_name: object - :param password: The password used in Basic authentication type. + :param password: Password to access the SAP BW server where the open hub destination is + located. :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The application's client ID used in AadServicePrincipal - authentication type. - :type service_principal_id: object - :param service_principal_key: The application's key used in AadServicePrincipal authentication - type. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal - authentication type under which your application resides. - :type tenant: object - :param aad_resource_id: The resource you are requesting authorization to use. - :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -14902,8 +24994,9 @@ class RestServiceLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -14913,15 +25006,12 @@ class RestServiceLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14929,210 +25019,146 @@ def __init__( self, **kwargs ): - super(RestServiceLinkedService, self).__init__(**kwargs) - self.type = 'RestService' - self.url = kwargs['url'] - self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) - self.authentication_type = kwargs['authentication_type'] + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.type = 'SapOpenHub' # type: str + self.server = kwargs['server'] + self.system_number = kwargs['system_number'] + self.client_id = kwargs['client_id'] + self.language = 
kwargs.get('language', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) - self.service_principal_id = kwargs.get('service_principal_id', None) - self.service_principal_key = kwargs.get('service_principal_key', None) - self.tenant = kwargs.get('tenant', None) - self.aad_resource_id = kwargs.get('aad_resource_id', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class RunFilterParameters(msrest.serialization.Model): - """Query parameters for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run event was updated in - 'ISO 8601' format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: Required. The time at or before which the run event was updated in - 'ISO 8601' format. - :type last_updated_before: ~datetime.datetime - :param filters: List of filters. - :type filters: list[~azure.synapse.artifacts.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~azure.synapse.artifacts.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__( - self, - **kwargs - ): - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = kwargs.get('continuation_token', None) - self.last_updated_after = kwargs['last_updated_after'] - self.last_updated_before = kwargs['last_updated_before'] - self.filters = kwargs.get('filters', None) - self.order_by = kwargs.get('order_by', None) - - -class RunQueryFilter(msrest.serialization.Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The allowed operands to query - pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger - runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", - "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", - "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". - :type operand: str or ~azure.synapse.artifacts.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values include: "Equals", - "NotEquals", "In", "NotIn". - :type operator: str or ~azure.synapse.artifacts.models.RunQueryFilterOperator - :param values: Required. List of filter values. 
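Because SapOpenHubLinkedService reads `server`, `system_number`, and `client_id` with `kwargs['...']`, omitting any of them raises `KeyError` at construction time. A sketch with placeholder connection values (`SecureString`, defined later in this module, satisfies the `SecretBase` parameter):

    from azure.synapse.artifacts.models import SapOpenHubLinkedService, SecureString

    linked_service = SapOpenHubLinkedService(
        server="sapbw.example.com",    # placeholder BW host
        system_number="00",            # usually a two-digit string
        client_id="800",               # usually a three-digit string
        user_name="bw_user",           # placeholder credentials
        password=SecureString(value="<secret>"),
    )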
- :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = kwargs['operand'] - self.operator = kwargs['operator'] - self.values = kwargs['values'] - - -class RunQueryOrderBy(msrest.serialization.Model): - """An object to provide order by options for listing runs. +class SapOpenHubSource(TabularSource): + """A copy activity source for SAP Business Warehouse Open Hub Destination source. All required parameters must be populated in order to send to Azure. - :param order_by: Required. Parameter name to be used for order by. The allowed parameters to - order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, - TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", - "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", - "TriggerRunTimestamp". - :type order_by: str or ~azure.synapse.artifacts.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". - :type order: str or ~azure.synapse.artifacts.models.RunQueryOrder + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). 
+ :type base_request_id: object """ _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } def __init__( self, **kwargs ): - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = kwargs['order_by'] - self.order = kwargs['order'] + super(SapOpenHubSource, self).__init__(**kwargs) + self.type = 'SapOpenHubSource' # type: str + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param environment_url: The URL of Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce instance. 
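A minimal delta-load sketch for SapOpenHubSource; per the docstring, only rows whose requestId exceeds `base_request_id` are retrieved:

    from azure.synapse.artifacts.models import SapOpenHubSource

    source = SapOpenHubSource(
        exclude_last_request=False,  # also read the records of the last request
        base_request_id=42,          # illustrative request-ID watermark for delta loading
    )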
- :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. - :type security_token: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param open_hub_destination_name: Required. The name of the Open Hub Destination with + destination type as Database Table. Type: string (or Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :type base_request_id: object """ _validation = { 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'open_hub_destination_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SalesforceLinkedService, self).__init__(**kwargs) - self.type = 'Salesforce' - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.type = 'SapOpenHubTable' # type: str + self.open_hub_destination_name = kwargs['open_hub_destination_name'] + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) -class 
SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. All required parameters must be populated in order to send to Azure. @@ -15149,23 +25175,52 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param client_id: Required. The client ID associated with the Salesforce Marketing Cloud - application. Type: string (or Expression with resultType string). + :param server: Host name of the SAP instance where the table is located. Type: string (or + Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is located. (Usually a + two-digit decimal number represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the table is located. + (Usually a three-digit decimal number represented as a string) Type: string (or Expression with + resultType string). :type client_id: object - :param client_secret: The client secret associated with the Salesforce Marketing Cloud - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object + :param language: Language of the SAP system where the table is located. The default value is + EN. Type: string (or Expression with resultType string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is located. Type: string + (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is located. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where the table is located. + Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). 
+ :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. + Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the SAP server where the + table is located. Type: string (or Expression with resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the SAP server where the + table is located. Type: string (or Expression with resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string + (or Expression with resultType string). + :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :type logon_group: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -15174,7 +25229,6 @@ class SalesforceMarketingCloudLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { @@ -15184,11 +25238,21 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -15196,18 +25260,65 @@ def __init__( self, **kwargs ): - super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloud' - self.client_id = kwargs['client_id'] - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - 
self.use_peer_verification = kwargs.get('use_peer_verification', None) + super(SapTableLinkedService, self).__init__(**kwargs) + self.type = 'SapTable' # type: str + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.snc_mode = kwargs.get('snc_mode', None) + self.snc_my_name = kwargs.get('snc_my_name', None) + self.snc_partner_name = kwargs.get('snc_partner_name', None) + self.snc_library_path = kwargs.get('snc_library_path', None) + self.snc_qop = kwargs.get('snc_qop', None) + self.logon_group = kwargs.get('logon_group', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. +class SapTablePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table will be split into. + Type: integer (or Expression with resultType string). + :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) + self.max_partitions_number = kwargs.get('max_partitions_number', None) + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. All required parameters must be populated in order to send to Azure. @@ -15233,13 +25344,15 @@ class SalesforceMarketingCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: Required. The name of the SAP Table. Type: string (or Expression with + resultType string). 
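Unlike the Open Hub linked service, none of the SapTableLinkedService typeProperties are model-required, presumably because a connection can target either an application server (`server`/`system_number`) or a message server (`message_server`/`message_server_service`/`logon_group`). A direct-application-server sketch with placeholder values:

    from azure.synapse.artifacts.models import SapTableLinkedService, SecureString

    linked_service = SapTableLinkedService(
        server="sap.example.com",   # placeholder application-server host
        system_number="00",
        client_id="100",
        user_name="sap_user",
        password=SecureString(value="<secret>"),
    )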
:type table_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, } _attribute_map = { @@ -15259,331 +25372,338 @@ def __init__( self, **kwargs ): - super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceMarketingCloudObject' - self.table_name = kwargs.get('table_name', None) + super(SapTableResourceDataset, self).__init__(**kwargs) + self.type = 'SapTableResource' # type: str + self.table_name = kwargs['table_name'] + + +class SapTableSource(TabularSource): + """A copy activity source for SAP Table source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param row_count: The number of rows to be retrieved. Type: integer(or Expression with + resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with + resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, + column0, column1. Type: string (or Expression with resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 + EQ SOME VALUE. Type: string (or Expression with resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when + retrieving data from SAP Table. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP table read in + parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", + "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". + :type partition_option: str or ~azure.synapse.artifacts.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP table source + partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SapTablePartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(SapTableSource, self).__init__(**kwargs) + self.type = 'SapTableSource' # type: str + self.row_count = kwargs.get('row_count', None) + self.row_skips = kwargs.get('row_skips', None) + self.rfc_table_fields = kwargs.get('rfc_table_fields', None) + self.rfc_table_options = kwargs.get('rfc_table_options', None) + self.batch_size = kwargs.get('batch_size', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Trigger description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
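A partitioned-read sketch for SapTableSource, assuming a fiscal-year column named GJAHR; the bounds and filter text are illustrative:

    from azure.synapse.artifacts.models import SapTablePartitionSettings, SapTableSource

    settings = SapTablePartitionSettings(
        partition_column_name="GJAHR",
        partition_lower_bound="2018",
        partition_upper_bound="2020",
        max_partitions_number=4,
    )
    source = SapTableSource(
        rfc_table_options="GJAHR GE '2018'",         # illustrative filter option
        partition_option="PartitionOnCalendarYear",  # a documented SapTablePartitionOption value
        partition_settings=settings,
    )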
+ :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param object_api_name: The Salesforce object API name. Type: string (or Expression with - resultType string). - :type object_api_name: object + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.synapse.artifacts.models.ScheduleTriggerRecurrence """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'runtime_state': {'readonly': True}, + 'recurrence': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } def __init__( self, **kwargs ): - super(SalesforceObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceObject' - self.object_api_name = kwargs.get('object_api_name', None) - + super(ScheduleTrigger, self).__init__(**kwargs) + self.type = 'ScheduleTrigger' # type: str + self.recurrence = kwargs['recurrence'] -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. - All required parameters must be populated in order to send to Azure. +class ScheduleTriggerRecurrence(msrest.serialization.Model): + """The workflow trigger recurrence. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param environment_url: The URL of Salesforce Service Cloud instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). 
- :type environment_url: object - :param username: The username for Basic authentication of the Salesforce instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. - :type security_token: ~azure.synapse.artifacts.models.SecretBase - :param extended_properties: Extended properties appended to the connection string. Type: string - (or Expression with resultType string). - :type extended_properties: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :type frequency: str or ~azure.synapse.artifacts.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: ~datetime.datetime + :param end_time: The end time. + :type end_time: ~datetime.datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. + :type schedule: ~azure.synapse.artifacts.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__( + self, + **kwargs + ): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) + + +class ScriptAction(msrest.serialization.Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should be executed. Possible + values include: "Headnode", "Workernode", "Zookeeper". + :type roles: str or ~azure.synapse.artifacts.models.HdiNodeTypes + :param parameters: The parameters for the script action. 
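Note that ScheduleTrigger's `runtime_state` is read-only and populated by the service, so callers supply only the recurrence (required; `KeyError` if omitted) and, optionally, the pipelines to start. A sketch of an hourly trigger:

    from datetime import datetime, timezone
    from azure.synapse.artifacts.models import ScheduleTrigger, ScheduleTriggerRecurrence

    recurrence = ScheduleTriggerRecurrence(
        frequency="Hour",   # a documented RecurrenceFrequency value
        interval=1,
        start_time=datetime(2020, 9, 15, tzinfo=timezone.utc),
        time_zone="UTC",
    )
    trigger = ScheduleTrigger(recurrence=recurrence)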
+ :type parameters: str """ _validation = { - 'type': {'required': True}, + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloud' - self.environment_url = kwargs.get('environment_url', None) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.security_token = kwargs.get('security_token', None) - self.extended_properties = kwargs.get('extended_properties', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(ScriptAction, self).__init__(**kwargs) + self.name = kwargs['name'] + self.uri = kwargs['uri'] + self.roles = kwargs['roles'] + self.parameters = kwargs.get('parameters', None) -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. +class SecureString(SecretBase): + """Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of the secret.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. 
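A ScriptAction construction sketch with placeholder name and URI; `roles` takes one of the documented HdiNodeTypes values:

    from azure.synapse.artifacts.models import ScriptAction

    action = ScriptAction(
        name="install-deps",                         # placeholder action name
        uri="https://example.com/scripts/setup.sh",  # placeholder script URI
        roles="Workernode",
        parameters="--quiet",                        # optional free-form parameter string
    )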
- :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or - Expression with resultType string). - :type object_api_name: object + :param value: Required. Value of secure string. + :type value: str """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) - self.type = 'SalesforceServiceCloudObject' - self.object_api_name = kwargs.get('object_api_name', None) + super(SecureString, self).__init__(**kwargs) + self.type = 'SecureString' # type: str + self.value = kwargs['value'] -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The type of dependency reference.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. 
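A minimal SecureString sketch; `value` is required (`kwargs['value']` raises `KeyError` when absent) and the discriminator is fixed by the constructor:

    from azure.synapse.artifacts.models import SecureString

    secret = SecureString(value="<secret>")  # masked with asterisks on Get/List responses
    assert secret.type == 'SecureString'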
+ :type size: str """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SapBwCubeDataset, self).__init__(**kwargs) - self.type = 'SapBwCube' + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str + self.offset = kwargs['offset'] + self.size = kwargs.get('size', None) -class SapBWLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with - resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a two-digit decimal - number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit - decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". 
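The `offset`/`size` validation above expects a (days.)hh:mm:ss timespan string of 8 to 15 characters, so an eight-character value is the shortest accepted form. A sketch:

    from azure.synapse.artifacts.models import SelfDependencyTumblingWindowTriggerReference

    dependency = SelfDependencyTumblingWindowTriggerReference(
        offset="01:00:00",  # required; must match the timespan pattern
        size="01:00:00",    # optional; defaults to the tumbling window's own frequency
    )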
+ :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :param linked_info: The base definition of a linked integration runtime. + :type linked_info: ~azure.synapse.artifacts.models.LinkedIntegrationRuntimeType """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, } def __init__( self, **kwargs ): - super(SapBWLinkedService, self).__init__(**kwargs) - self.type = 'SapBW' - self.server = kwargs['server'] - self.system_number = kwargs['system_number'] - self.client_id = kwargs['client_id'] - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) + self.type = 'SelfHosted' # type: str + self.linked_info = kwargs.get('linked_info', None) -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. +class ServiceNowLinkedService(LinkedService): + """ServiceNow server linked service. All required parameters must be populated in order to send to Azure. @@ -15600,24 +25720,42 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. The URL of SAP Cloud for Customer OData API. For example, - '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with - resultType string). - :type url: object - :param username: The username for Basic authentication. Type: string (or Expression with - resultType string). + :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. + :code:``.service-now.com). + :type endpoint: object + :param authentication_type: Required. The authentication type to use. Possible values include: + "Basic", "OAuth2". + :type authentication_type: str or ~azure.synapse.artifacts.models.ServiceNowAuthenticationType + :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 + authentication. :type username: object - :param password: The password for Basic authentication. + :param password: The password corresponding to the user name for Basic and OAuth2 + authentication. 
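A minimal SelfHostedIntegrationRuntime sketch; the `type` discriminator is set by the constructor rather than by the caller, and `linked_info` optionally carries a LinkedIntegrationRuntimeType definition:

    from azure.synapse.artifacts.models import SelfHostedIntegrationRuntime

    runtime = SelfHostedIntegrationRuntime(description="Runtime hosted on an on-premises VM")
    assert runtime.type == 'SelfHosted'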
:type password: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id for OAuth2 authentication. + :type client_id: object + :param client_secret: The client secret for OAuth2 authentication. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, + 'endpoint': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -15627,9 +25765,15 @@ class SapCloudForCustomerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -15637,16 +25781,22 @@ def __init__( self, **kwargs ): - super(SapCloudForCustomerLinkedService, self).__init__(**kwargs) - self.type = 'SapCloudForCustomer' - self.url = kwargs['url'] + super(ServiceNowLinkedService, self).__init__(**kwargs) + self.type = 'ServiceNow' # type: str + self.endpoint = kwargs['endpoint'] + self.authentication_type = kwargs['authentication_type'] self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the 
SAP Cloud for Customer OData entity. +class ServiceNowObjectDataset(Dataset): + """ServiceNow server dataset. All required parameters must be populated in order to send to Azure. @@ -15672,15 +25822,13 @@ class SapCloudForCustomerResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or - Expression with resultType string). - :type path: object + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'path': {'required': True}, } _attribute_map = { @@ -15693,278 +25841,215 @@ class SapCloudForCustomerResourceDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs) - self.type = 'SapCloudForCustomerResource' - self.path = kwargs['path'] + super(ServiceNowObjectDataset, self).__init__(**kwargs) + self.type = 'ServiceNowObject' # type: str + self.table_name = kwargs.get('table_name', None) -class SapEccLinkedService(LinkedService): - """Linked service for SAP ERP Central Component(SAP ECC). +class ServiceNowSource(TabularSource): + """A copy activity ServiceNow server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with - resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or Expression with - resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SapEccLinkedService, self).__init__(**kwargs) - self.type = 'SapEcc' - self.url = kwargs['url'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(ServiceNowSource, self).__init__(**kwargs) + self.type = 'ServiceNowSource' # type: str + self.query = kwargs.get('query', None) -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. +class SetVariableActivity(Activity): + """Set value for a Variable. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with - resultType string). - :type path: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param variable_name: Name of the variable whose value needs to be set. + :type variable_name: str + :param value: Value to be set. Could be a static value or Expression. + :type value: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SapEccResourceDataset, self).__init__(**kwargs) - self.type = 'SapEccResource' - self.path = kwargs['path'] + super(SetVariableActivity, self).__init__(**kwargs) + self.type = 'SetVariable' # type: str + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. 
- :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with - resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect to the SAP HANA - server. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.synapse.artifacts.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { - 'type': {'required': True}, - 'server': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SapHanaLinkedService, self).__init__(**kwargs) - self.type = 'SapHana' - self.connection_string = kwargs.get('connection_string', None) - self.server = kwargs['server'] - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SftpLocation, self).__init__(**kwargs) + self.type = 'SftpLocation' # type: str -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. +class SftpReadSettings(StoreReadSettings): + """Sftp read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). 
- :type table: object + :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SapHanaTableDataset, self).__init__(**kwargs) - self.type = 'SapHanaTable' - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) - self.table = kwargs.get('table', None) + super(SftpReadSettings, self).__init__(**kwargs) + self.type = 'SftpReadSettings' # type: str + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. +class SftpServerLinkedService(LinkedService): + """A linked service for an SSH File Transfer Protocol (SFTP) server. All required parameters must be populated in order to send to Azure. @@ -15981,37 +26066,48 @@ class SapOpenHubLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Host name of the SAP BW instance where the open hub destination is - located. Type: string (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system where the open hub destination - is located. (Usually a two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where the open hub - destination is located. (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). - :type client_id: object - :param language: Language of the BW system where the open hub destination is located. The - default value is EN. Type: string (or Expression with resultType string). - :type language: object - :param user_name: Username to access the SAP BW server where the open hub destination is - located. 
Type: string (or Expression with resultType string). + :param host: Required. The SFTP server host name. Type: string (or Expression with resultType + string). + :type host: object + :param port: The TCP port number that the SFTP server uses to listen for client connections. + Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param authentication_type: The authentication type to be used to connect to the SFTP server. + Possible values include: "Basic", "SshPublicKey". + :type authentication_type: str or ~azure.synapse.artifacts.models.SftpAuthenticationType + :param user_name: The username used to log on to the SFTP server. Type: string (or Expression + with resultType string). :type user_name: object - :param password: Password to access the SAP BW server where the open hub destination is - located. + :param password: Password to log on to the SFTP server for Basic authentication. :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only + valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either + PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH + format. Type: string (or Expression with resultType string). + :type private_key_path: object + :param private_key_content: Base64 encoded SSH private key content for SshPublicKey + authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or + PrivateKeyContent should be specified. SSH private key should be OpenSSH format. + :type private_key_content: ~azure.synapse.artifacts.models.SecretBase + :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is + encrypted. + :type pass_phrase: ~azure.synapse.artifacts.models.SecretBase + :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is + false. Type: boolean (or Expression with resultType boolean). + :type skip_host_key_validation: object + :param host_key_fingerprint: The host key fingerprint of the SFTP server. When + SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or + Expression with resultType string).
+ :type host_key_fingerprint: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, + 'host': {'required': True}, } _attribute_map = { @@ -16021,103 +26117,81 @@ class SapOpenHubLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, + 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, + 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, + 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, + 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SapOpenHubLinkedService, self).__init__(**kwargs) - self.type = 'SapOpenHub' - self.server = kwargs['server'] - self.system_number = kwargs['system_number'] - self.client_id = kwargs['client_id'] - self.language = kwargs.get('language', None) + super(SftpServerLinkedService, self).__init__(**kwargs) + self.type = 'Sftp' # type: str + self.host = kwargs['host'] + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.private_key_path = kwargs.get('private_key_path', None) + self.private_key_content = kwargs.get('private_key_content', None) + self.pass_phrase = kwargs.get('pass_phrase', None) + self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) + self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) -class SapOpenHubTableDataset(Dataset): - """Sap Business Warehouse Open Hub Destination Table properties. +class SftpWriteSettings(StoreWriteSettings): + """Sftp write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param open_hub_destination_name: Required. The name of the Open Hub Destination with - destination type as Database Table. Type: string (or Expression with resultType string). - :type open_hub_destination_name: object - :param exclude_last_request: Whether to exclude the records of the last request. The default - value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is set, only data with - requestId larger than the value of this property will be retrieved. The default value is 0. - Type: integer (or Expression with resultType integer ). - :type base_request_id: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default + value: 01:00:00 (one hour). Type: string (or Expression with resultType string). 
+ :type operation_timeout: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'open_hub_destination_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SapOpenHubTableDataset, self).__init__(**kwargs) - self.type = 'SapOpenHubTable' - self.open_hub_destination_name = kwargs['open_hub_destination_name'] - self.exclude_last_request = kwargs.get('exclude_last_request', None) - self.base_request_id = kwargs.get('base_request_id', None) + super(SftpWriteSettings, self).__init__(**kwargs) + self.type = 'SftpWriteSettings' # type: str + self.operation_timeout = kwargs.get('operation_timeout', None) -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. +class ShopifyLinkedService(LinkedService): + """Shopify Service linked service. All required parameters must be populated in order to send to Azure. @@ -16134,52 +26208,21 @@ class SapTableLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Host name of the SAP instance where the table is located. Type: string (or - Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is located. (Usually a - two-digit decimal number represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the table is located. - (Usually a three-digit decimal number represented as a string) Type: string (or Expression with - resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. The default value is - EN. Type: string (or Expression with resultType string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. Type: string (or - Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP server where the table is located. Type: string - (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP server where the table is located. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param message_server: The hostname of the SAP Message Server. 
Type: string (or Expression with - resultType string). - :type message_server: object - :param message_server_service: The service name or port number of the Message Server. Type: - string (or Expression with resultType string). - :type message_server_service: object - :param snc_mode: SNC activation indicator to access the SAP server where the table is located. - Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :type snc_mode: object - :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. - Type: string (or Expression with resultType string). - :type snc_my_name: object - :param snc_partner_name: Communication partner's SNC name to access the SAP server where the - table is located. Type: string (or Expression with resultType string). - :type snc_partner_name: object - :param snc_library_path: External security product's library to access the SAP server where the - table is located. Type: string (or Expression with resultType string). - :type snc_library_path: object - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string - (or Expression with resultType string). - :type snc_qop: object - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with - resultType string). - :type logon_group: object + :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). + :type host: object + :param access_token: The API access token that can be used to access Shopify’s data. The token + won't expire if it is in offline mode. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
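Since the regenerated models above are all kwargs-driven msrest classes, a short usage sketch may help while reviewing these hunks. This is a minimal sketch, assuming the SftpServerLinkedService and SecureString models generated in this file are importable; the host, credentials, and fingerprint are illustrative placeholders, not values taken from this diff.

from azure.synapse.artifacts.models import SecureString, SftpServerLinkedService

# Required properties ('host' here) are read via kwargs['host'], so omitting
# them raises KeyError at construction time; optional ones default to None.
linked_service = SftpServerLinkedService(
    host='sftp.example.com',                          # placeholder host
    port=22,
    authentication_type='Basic',
    user_name='loader',                               # placeholder user
    password=SecureString(value='example-password'),  # masked with '*' on Get/List calls
    skip_host_key_validation=False,
    host_key_fingerprint='ssh-rsa 2048 aa:bb:cc',     # placeholder fingerprint
)
assert linked_service.type == 'Sftp'  # discriminator constant filled by __init__

The ShopifyLinkedService hunks continuing below follow the same pattern: host is the only required type property, and every optional property falls back to None.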
@@ -16188,6 +26231,7 @@ class SapTableLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { @@ -16197,21 +26241,11 @@ class SapTableLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -16219,28 +26253,18 @@ def __init__( self, **kwargs ): - super(SapTableLinkedService, self).__init__(**kwargs) - self.type = 'SapTable' - self.server = kwargs.get('server', None) - self.system_number = kwargs.get('system_number', None) - self.client_id = kwargs.get('client_id', None) - self.language = kwargs.get('language', None) - self.system_id = kwargs.get('system_id', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.message_server = kwargs.get('message_server', None) - self.message_server_service = kwargs.get('message_server_service', None) - self.snc_mode = kwargs.get('snc_mode', None) - self.snc_my_name = kwargs.get('snc_my_name', None) - self.snc_partner_name = kwargs.get('snc_partner_name', None) - self.snc_library_path = kwargs.get('snc_library_path', None) - self.snc_qop = kwargs.get('snc_qop', None) - self.logon_group = kwargs.get('logon_group', None) + super(ShopifyLinkedService, self).__init__(**kwargs) + self.type = 'Shopify' # type: str + self.host = kwargs['host'] + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class SapTableResourceDataset(Dataset): - """SAP Table Resource properties. 
+class ShopifyObjectDataset(Dataset): + """Shopify Service dataset. All required parameters must be populated in order to send to Azure. @@ -16266,15 +26290,13 @@ class SapTableResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. The name of the SAP Table. Type: string (or Expression with - resultType string). + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -16294,385 +26316,440 @@ def __init__( self, **kwargs ): - super(SapTableResourceDataset, self).__init__(**kwargs) - self.type = 'SapTableResource' - self.table_name = kwargs['table_name'] + super(ShopifyObjectDataset, self).__init__(**kwargs) + self.type = 'ShopifyObject' # type: str + self.table_name = kwargs.get('table_name', None) -class ScriptAction(msrest.serialization.Model): - """Custom script action to run on HDI ondemand cluster once it's up. +class ShopifySource(TabularSource): + """A copy activity Shopify Service source. All required parameters must be populated in order to send to Azure. - :param name: Required. The user provided name of the script action. - :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~azure.synapse.artifacts.models.HdiNodeTypes - :param parameters: The parameters for the script action. - :type parameters: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } + def __init__( + self, + **kwargs + ): + super(ShopifySource, self).__init__(**kwargs) + self.type = 'ShopifySource' # type: str + self.query = kwargs.get('query', None) + + +class Sku(msrest.serialization.Model): + """SQL pool SKU. + + :param tier: The service tier. + :type tier: str + :param name: The SKU name. + :type name: str + :param capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. + :type capacity: int + """ + _attribute_map = { + 'tier': {'key': 'tier', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, } def __init__( self, **kwargs ): - super(ScriptAction, self).__init__(**kwargs) - self.name = kwargs['name'] - self.uri = kwargs['uri'] - self.roles = kwargs['roles'] - self.parameters = kwargs.get('parameters', None) + super(Sku, self).__init__(**kwargs) + self.tier = kwargs.get('tier', None) + self.name = kwargs.get('name', None) + self.capacity = kwargs.get('capacity', None) -class SecureString(SecretBase): - """Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. +class SparkBatchJob(msrest.serialization.Model): + """SparkBatchJob. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str + :param livy_info: + :type livy_info: ~azure.synapse.artifacts.models.SparkBatchJobState + :param name: The batch name. + :type name: str + :param workspace_name: The workspace name. + :type workspace_name: str + :param spark_pool_name: The Spark pool name. + :type spark_pool_name: str + :param submitter_name: The submitter name. + :type submitter_name: str + :param submitter_id: The submitter identifier. + :type submitter_id: str + :param artifact_id: The artifact identifier. + :type artifact_id: str + :param job_type: The job type. Possible values include: "SparkBatch", "SparkSession". + :type job_type: str or ~azure.synapse.artifacts.models.SparkJobType + :param result: The Spark batch job result. Possible values include: "Uncertain", "Succeeded", + "Failed", "Cancelled". + :type result: str or ~azure.synapse.artifacts.models.SparkBatchJobResultType + :param scheduler: The scheduler information. + :type scheduler: ~azure.synapse.artifacts.models.SparkScheduler + :param plugin: The plugin information. + :type plugin: ~azure.synapse.artifacts.models.SparkServicePlugin + :param errors: The error information. + :type errors: list[~azure.synapse.artifacts.models.SparkServiceError] + :param tags: A set of tags. The tags. 
+ :type tags: dict[str, str] + :param id: Required. The session Id. + :type id: int + :param app_id: The application id of this session. + :type app_id: str + :param app_info: The detailed application info. + :type app_info: dict[str, str] + :param state: The batch state. + :type state: str + :param log_lines: The log lines. + :type log_lines: list[str] """ _validation = { - 'type': {'required': True}, - 'value': {'required': True}, + 'id': {'required': True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'livy_info': {'key': 'livyInfo', 'type': 'SparkBatchJobState'}, + 'name': {'key': 'name', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'}, + 'submitter_name': {'key': 'submitterName', 'type': 'str'}, + 'submitter_id': {'key': 'submitterId', 'type': 'str'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'result': {'key': 'result', 'type': 'str'}, + 'scheduler': {'key': 'schedulerInfo', 'type': 'SparkScheduler'}, + 'plugin': {'key': 'pluginInfo', 'type': 'SparkServicePlugin'}, + 'errors': {'key': 'errorInfo', 'type': '[SparkServiceError]'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'id': {'key': 'id', 'type': 'int'}, + 'app_id': {'key': 'appId', 'type': 'str'}, + 'app_info': {'key': 'appInfo', 'type': '{str}'}, + 'state': {'key': 'state', 'type': 'str'}, + 'log_lines': {'key': 'log', 'type': '[str]'}, } def __init__( self, **kwargs ): - super(SecureString, self).__init__(**kwargs) - self.type = 'SecureString' - self.value = kwargs['value'] + super(SparkBatchJob, self).__init__(**kwargs) + self.livy_info = kwargs.get('livy_info', None) + self.name = kwargs.get('name', None) + self.workspace_name = kwargs.get('workspace_name', None) + self.spark_pool_name = kwargs.get('spark_pool_name', None) + self.submitter_name = kwargs.get('submitter_name', None) + self.submitter_id = kwargs.get('submitter_id', None) + self.artifact_id = kwargs.get('artifact_id', None) + self.job_type = kwargs.get('job_type', None) + self.result = kwargs.get('result', None) + self.scheduler = kwargs.get('scheduler', None) + self.plugin = kwargs.get('plugin', None) + self.errors = kwargs.get('errors', None) + self.tags = kwargs.get('tags', None) + self.id = kwargs['id'] + self.app_id = kwargs.get('app_id', None) + self.app_info = kwargs.get('app_info', None) + self.state = kwargs.get('state', None) + self.log_lines = kwargs.get('log_lines', None) + + +class SparkBatchJobState(msrest.serialization.Model): + """SparkBatchJobState. + + :param not_started_at: the time at which the "not_started" Livy state was first seen. + :type not_started_at: ~datetime.datetime + :param starting_at: the time at which the "starting" Livy state was first seen. + :type starting_at: ~datetime.datetime + :param running_at: the time at which the "running" Livy state was first seen. + :type running_at: ~datetime.datetime + :param dead_at: the time at which the "dead" Livy state was first seen. + :type dead_at: ~datetime.datetime + :param success_at: the time at which the "success" Livy state was first seen. + :type success_at: ~datetime.datetime + :param terminated_at: the time at which the "killed" Livy state was first seen. + :type terminated_at: ~datetime.datetime + :param recovering_at: the time at which the "recovering" Livy state was first seen.
+ :type recovering_at: ~datetime.datetime + :param current_state: the Spark job state. + :type current_state: str + :param job_creation_request: + :type job_creation_request: ~azure.synapse.artifacts.models.SparkRequest + """ + + _attribute_map = { + 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, + 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, + 'running_at': {'key': 'runningAt', 'type': 'iso-8601'}, + 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, + 'success_at': {'key': 'successAt', 'type': 'iso-8601'}, + 'terminated_at': {'key': 'killedAt', 'type': 'iso-8601'}, + 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'SparkRequest'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkBatchJobState, self).__init__(**kwargs) + self.not_started_at = kwargs.get('not_started_at', None) + self.starting_at = kwargs.get('starting_at', None) + self.running_at = kwargs.get('running_at', None) + self.dead_at = kwargs.get('dead_at', None) + self.success_at = kwargs.get('success_at', None) + self.terminated_at = kwargs.get('terminated_at', None) + self.recovering_at = kwargs.get('recovering_at', None) + self.current_state = kwargs.get('current_state', None) + self.job_creation_request = kwargs.get('job_creation_request', None) -class ServiceNowLinkedService(LinkedService): - """ServiceNow server linked service. +class SparkJobDefinition(msrest.serialization.Model): + """Spark job definition. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: The description of the Spark job definition. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. - :code:``.service-now.com). - :type endpoint: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Basic", "OAuth2". - :type authentication_type: str or ~azure.synapse.artifacts.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 - authentication. - :type username: object - :param password: The password corresponding to the user name for Basic and OAuth2 - authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: object - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. 
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param target_big_data_pool: Required. Big data pool reference. + :type target_big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference + :param required_spark_version: The required Spark version of the application. + :type required_spark_version: str + :param language: The language of the Spark application. + :type language: str + :param job_properties: Required. The properties of the Spark job. + :type job_properties: ~azure.synapse.artifacts.models.SparkJobProperties """ _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, + 'target_big_data_pool': {'required': True}, + 'job_properties': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'target_big_data_pool': {'key': 'targetBigDataPool', 'type': 'BigDataPoolReference'}, + 'required_spark_version': {'key': 'requiredSparkVersion', 'type': 'str'}, + 'language': {'key': 'language', 'type': 'str'}, + 'job_properties': {'key': 'jobProperties', 'type': 'SparkJobProperties'}, } def __init__( self, **kwargs ): - super(ServiceNowLinkedService, self).__init__(**kwargs) - self.type = 'ServiceNow' - self.endpoint = kwargs['endpoint'] - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) - 
self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SparkJobDefinition, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.target_big_data_pool = kwargs['target_big_data_pool'] + self.required_spark_version = kwargs.get('required_spark_version', None) + self.language = kwargs.get('language', None) + self.job_properties = kwargs['job_properties'] -class ServiceNowObjectDataset(Dataset): - """ServiceNow server dataset. +class SparkJobDefinitionResource(AzureEntityResource): + """Spark job definition resource type. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of spark job definition. 
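
A minimal usage sketch for the SparkJobDefinition model added above, paired with the SparkJobProperties model that appears further down in this diff. The file path, pool name, and main class are placeholder values, and BigDataPoolReference is assumed to take type and reference_name like the other reference models in this module:

    from azure.synapse.artifacts import models

    # Placeholder values throughout; BigDataPoolReference's signature is assumed.
    props = models.SparkJobProperties(
        file="abfss://jobs@account.dfs.core.windows.net/wordcount.jar",
        class_name="com.contoso.WordCount",
        args=["--input", "input.txt"],
        driver_memory="4g",
        driver_cores=4,
        executor_memory="4g",
        executor_cores=4,
        num_executors=2,
    )
    definition = models.SparkJobDefinition(
        description="Nightly word count job",
        target_big_data_pool=models.BigDataPoolReference(
            type="BigDataPoolReference", reference_name="mysparkpool"
        ),
        language="scala",
        job_properties=props,
    )

All six required SparkJobProperties fields must be supplied, since the generated __init__ reads them with kwargs[...] rather than kwargs.get(...).
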
+ :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'SparkJobDefinition'}, } def __init__( self, **kwargs ): - super(ServiceNowObjectDataset, self).__init__(**kwargs) - self.type = 'ServiceNowObject' - self.table_name = kwargs.get('table_name', None) + super(SparkJobDefinitionResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] -class SetVariableActivity(Activity): - """Set value for a Variable. +class SparkJobDefinitionsListResponse(msrest.serialization.Model): + """A list of spark job definitions resources. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be set. - :type variable_name: str - :param value: Value to be set. Could be a static value or Expression. - :type value: object + :param value: Required. List of spark job definitions. + :type value: list[~azure.synapse.artifacts.models.SparkJobDefinitionResource] + :param next_link: The link to the next page of results, if any remaining results exist. 
+ :type next_link: str """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + 'value': {'key': 'value', 'type': '[SparkJobDefinitionResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SetVariableActivity, self).__init__(**kwargs) - self.type = 'SetVariable' - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) + super(SparkJobDefinitionsListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) -class SftpServerLinkedService(LinkedService): - """A linked service for an SSH File Transfer Protocol (SFTP) server. +class SparkJobProperties(msrest.serialization.Model): + """The properties of the Spark job. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The SFTP server host name. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the SFTP server uses to listen for client connections. - Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "SshPublicKey". - :type authentication_type: str or ~azure.synapse.artifacts.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. Type: string (or Expression - with resultType string). - :type user_name: object - :param password: Password to logon the SFTP server for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only - valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either - PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH - format. 
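
The new SparkJobDefinitionsListResponse is a plain paged-list model: value is required, and a populated next_link signals that another page exists. A sketch of iterating it (fetch_page is a hypothetical helper standing in for whatever retrieves the next page; it is not part of this SDK):

    from azure.synapse.artifacts import models

    page = models.SparkJobDefinitionsListResponse(value=[])  # 'value' is required
    while True:
        for item in page.value:
            print(item.name)
        if not page.next_link:
            break
        page = fetch_page(page.next_link)  # hypothetical helper, not part of this SDK
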
Type: string (or Expression with resultType string). - :type private_key_path: object - :param private_key_content: Base64 encoded SSH private key content for SshPublicKey - authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or - PrivateKeyContent should be specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.synapse.artifacts.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is - encrypted. - :type pass_phrase: ~azure.synapse.artifacts.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type skip_host_key_validation: object - :param host_key_fingerprint: The host key finger-print of the SFTP server. When - SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or - Expression with resultType string). - :type host_key_fingerprint: object + :param name: The name of the job. + :type name: str + :param file: Required. File containing the application to execute. + :type file: str + :param class_name: Main class for Java/Scala application. + :type class_name: str + :param conf: Spark configuration properties. + :type conf: object + :param args: Command line arguments for the application. + :type args: list[str] + :param jars: Jars to be used in this job. + :type jars: list[str] + :param files: Files to be used in this job. + :type files: list[str] + :param archives: Archives to be used in this job. + :type archives: list[str] + :param driver_memory: Required. Amount of memory to use for the driver process. + :type driver_memory: str + :param driver_cores: Required. Number of cores to use for the driver. + :type driver_cores: int + :param executor_memory: Required. Amount of memory to use per executor process. + :type executor_memory: str + :param executor_cores: Required. Number of cores to use for each executor. + :type executor_cores: int + :param num_executors: Required. Number of executors to launch for this job.
+ :type num_executors: int """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + 'file': {'required': True}, + 'driver_memory': {'required': True}, + 'driver_cores': {'required': True}, + 'executor_memory': {'required': True}, + 'executor_cores': {'required': True}, + 'num_executors': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'conf': {'key': 'conf', 'type': 'object'}, + 'args': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, } def __init__( self, **kwargs ): - super(SftpServerLinkedService, self).__init__(**kwargs) - self.type = 'Sftp' - self.host = kwargs['host'] - self.port = kwargs.get('port', None) - self.authentication_type = kwargs.get('authentication_type', None) - self.user_name = kwargs.get('user_name', None) - self.password = kwargs.get('password', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.private_key_path = kwargs.get('private_key_path', None) - self.private_key_content = kwargs.get('private_key_content', None) - self.pass_phrase = kwargs.get('pass_phrase', None) - self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None) - self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None) + super(SparkJobProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.file = kwargs['file'] + self.class_name = kwargs.get('class_name', None) + self.conf = kwargs.get('conf', None) + self.args = kwargs.get('args', None) + self.jars = kwargs.get('jars', None) + self.files = kwargs.get('files', None) + self.archives = kwargs.get('archives', None) + self.driver_memory = kwargs['driver_memory'] + self.driver_cores = 
kwargs['driver_cores'] + self.executor_memory = kwargs['executor_memory'] + self.executor_cores = kwargs['executor_cores'] + self.num_executors = kwargs['num_executors'] -class ShopifyLinkedService(LinkedService): - """Shopify Service linked service. +class SparkLinkedService(LinkedService): + """Spark Server linked service. All required parameters must be populated in order to send to Azure. @@ -16689,21 +26766,45 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). + :param host: Required. IP address or host name of the Spark server. :type host: object - :param access_token: The API access token that can be used to access Shopify’s data. The token - won't expire if it is offline mode. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param port: Required. The TCP port that the Spark server uses to listen for client + connections. + :type port: object + :param server_type: The type of Spark server. Possible values include: "SharkServer", + "SharkServer2", "SparkThriftServer". + :type server_type: str or ~azure.synapse.artifacts.models.SparkServerType + :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :type thrift_transport_protocol: str or + ~azure.synapse.artifacts.models.SparkThriftTransportProtocol + :param authentication_type: Required. The authentication method used to access the Spark + server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.SparkAuthenticationType + :param username: The user name that you use to access Spark Server. + :type username: object + :param password: The password corresponding to the user name that you provided in the Username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param http_path: The partial URL corresponding to the Spark server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. 
+ :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -16713,6 +26814,8 @@ class ShopifyLinkedService(LinkedService): _validation = { 'type': {'required': True}, 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -16723,10 +26826,18 @@ class ShopifyLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -16734,18 +26845,26 @@ def __init__( self, **kwargs ): - super(ShopifyLinkedService, self).__init__(**kwargs) - self.type = 'Shopify' + super(SparkLinkedService, self).__init__(**kwargs) + self.type = 'Spark' # type: str self.host = kwargs['host'] - self.access_token = kwargs.get('access_token', None) - self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) - self.use_host_verification = kwargs.get('use_host_verification', None) - self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.port = kwargs['port'] + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs['authentication_type'] + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + 
self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class ShopifyObjectDataset(Dataset): - """Shopify Service dataset. +class SparkObjectDataset(Dataset): + """Spark Server dataset. All required parameters must be populated in order to send to Azure. @@ -16771,8 +26890,14 @@ class ShopifyObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using schema + table + properties instead. :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression + with resultType string). + :type schema_type_properties_schema: object """ _validation = { @@ -16791,761 +26916,762 @@ class ShopifyObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, **kwargs ): - super(ShopifyObjectDataset, self).__init__(**kwargs) - self.type = 'ShopifyObject' + super(SparkObjectDataset, self).__init__(**kwargs) + self.type = 'SparkObject' # type: str self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class SparkBatchJob(msrest.serialization.Model): - """SparkBatchJob. - - All required parameters must be populated in order to send to Azure. +class SparkRequest(msrest.serialization.Model): + """SparkRequest. - :param livy_info: - :type livy_info: ~azure.synapse.artifacts.models.SparkBatchJobState - :param name: The batch name. + :param name: :type name: str - :param workspace_name: The workspace name. - :type workspace_name: str - :param spark_pool_name: The Spark pool name. - :type spark_pool_name: str - :param submitter_name: The submitter name. - :type submitter_name: str - :param submitter_id: The submitter identifier. - :type submitter_id: str - :param artifact_id: The artifact identifier. - :type artifact_id: str - :param job_type: The job type. Possible values include: "SparkBatch", "SparkSession". - :type job_type: str or ~azure.synapse.artifacts.models.SparkJobType - :param result: The Spark batch job result. Possible values include: "Uncertain", "Succeeded", - "Failed", "Cancelled". - :type result: str or ~azure.synapse.artifacts.models.SparkBatchJobResultType - :param scheduler: The scheduler information. - :type scheduler: ~azure.synapse.artifacts.models.SparkScheduler - :param plugin: The plugin information. - :type plugin: ~azure.synapse.artifacts.models.SparkServicePlugin - :param errors: The error information. 
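
A minimal sketch combining the SparkLinkedService and SparkObjectDataset models added above. Host, credentials, and table names are placeholders, and the SecureString and LinkedServiceReference constructors are assumed to follow the same kwargs pattern as the rest of this models module:

    from azure.synapse.artifacts import models

    spark_ls = models.SparkLinkedService(
        host="spark.contoso.com",          # placeholder server
        port=10001,
        authentication_type="UsernameAndPassword",
        username="admin",
        password=models.SecureString(value="<secret>"),  # SecretBase subtype; signature assumed
        enable_ssl=True,
    )
    dataset = models.SparkObjectDataset(
        linked_service_name=models.LinkedServiceReference(
            type="LinkedServiceReference", reference_name="SparkServerLS"  # signature assumed
        ),
        schema_type_properties_schema="default",  # serialized as typeProperties.schema
        table="trips",                            # preferred over the retired table_name
    )
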
- :type errors: list[~azure.synapse.artifacts.models.SparkServiceError] - :param tags: A set of tags. The tags. - :type tags: dict[str, str] - :param id: Required. The session Id. - :type id: int - :param app_id: The application id of this session. - :type app_id: str - :param app_info: The detailed application info. - :type app_info: dict[str, str] - :param state: The batch state. - :type state: str - :param log_lines: The log lines. - :type log_lines: list[str] + :param file: + :type file: str + :param class_name: + :type class_name: str + :param arguments: + :type arguments: list[str] + :param jars: + :type jars: list[str] + :param python_files: + :type python_files: list[str] + :param files: + :type files: list[str] + :param archives: + :type archives: list[str] + :param configuration: Dictionary of :code:``. + :type configuration: dict[str, str] + :param driver_memory: + :type driver_memory: str + :param driver_cores: + :type driver_cores: int + :param executor_memory: + :type executor_memory: str + :param executor_cores: + :type executor_cores: int + :param executor_count: + :type executor_count: int """ - _validation = { - 'id': {'required': True}, - } - _attribute_map = { - 'livy_info': {'key': 'livyInfo', 'type': 'SparkBatchJobState'}, 'name': {'key': 'name', 'type': 'str'}, - 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, - 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'}, - 'submitter_name': {'key': 'submitterName', 'type': 'str'}, - 'submitter_id': {'key': 'submitterId', 'type': 'str'}, - 'artifact_id': {'key': 'artifactId', 'type': 'str'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'result': {'key': 'result', 'type': 'str'}, - 'scheduler': {'key': 'schedulerInfo', 'type': 'SparkScheduler'}, - 'plugin': {'key': 'pluginInfo', 'type': 'SparkServicePlugin'}, - 'errors': {'key': 'errorInfo', 'type': '[SparkServiceError]'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'id': {'key': 'id', 'type': 'int'}, - 'app_id': {'key': 'appId', 'type': 'str'}, - 'app_info': {'key': 'appInfo', 'type': '{str}'}, - 'state': {'key': 'state', 'type': 'str'}, - 'log_lines': {'key': 'log', 'type': '[str]'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'arguments': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'python_files': {'key': 'pyFiles', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'configuration': {'key': 'conf', 'type': '{str}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'executor_count': {'key': 'numExecutors', 'type': 'int'}, } def __init__( self, **kwargs ): - super(SparkBatchJob, self).__init__(**kwargs) - self.livy_info = kwargs.get('livy_info', None) + super(SparkRequest, self).__init__(**kwargs) self.name = kwargs.get('name', None) - self.workspace_name = kwargs.get('workspace_name', None) - self.spark_pool_name = kwargs.get('spark_pool_name', None) - self.submitter_name = kwargs.get('submitter_name', None) - self.submitter_id = kwargs.get('submitter_id', None) - self.artifact_id = kwargs.get('artifact_id', None) - self.job_type = kwargs.get('job_type', None) - self.result = kwargs.get('result', None) - self.scheduler = kwargs.get('scheduler', None) - self.plugin = kwargs.get('plugin', None) - self.errors 
= kwargs.get('errors', None) - self.tags = kwargs.get('tags', None) - self.id = kwargs['id'] - self.app_id = kwargs.get('app_id', None) - self.app_info = kwargs.get('app_info', None) - self.state = kwargs.get('state', None) - self.log_lines = kwargs.get('log_lines', None) + self.file = kwargs.get('file', None) + self.class_name = kwargs.get('class_name', None) + self.arguments = kwargs.get('arguments', None) + self.jars = kwargs.get('jars', None) + self.python_files = kwargs.get('python_files', None) + self.files = kwargs.get('files', None) + self.archives = kwargs.get('archives', None) + self.configuration = kwargs.get('configuration', None) + self.driver_memory = kwargs.get('driver_memory', None) + self.driver_cores = kwargs.get('driver_cores', None) + self.executor_memory = kwargs.get('executor_memory', None) + self.executor_cores = kwargs.get('executor_cores', None) + self.executor_count = kwargs.get('executor_count', None) -class SparkBatchJobState(msrest.serialization.Model): - """SparkBatchJobState. +class SparkScheduler(msrest.serialization.Model): + """SparkScheduler. - :param not_started_at: the time that at which "not_started" livy state was first seen. - :type not_started_at: ~datetime.datetime - :param starting_at: the time that at which "starting" livy state was first seen. - :type starting_at: ~datetime.datetime - :param running_at: the time that at which "running" livy state was first seen. - :type running_at: ~datetime.datetime - :param dead_at: time that at which "dead" livy state was first seen. - :type dead_at: ~datetime.datetime - :param success_at: the time that at which "success" livy state was first seen. - :type success_at: ~datetime.datetime - :param terminated_at: the time that at which "killed" livy state was first seen. - :type terminated_at: ~datetime.datetime - :param recovering_at: the time that at which "recovering" livy state was first seen. - :type recovering_at: ~datetime.datetime - :param current_state: the Spark job state. - :type current_state: str - :param job_creation_request: - :type job_creation_request: ~azure.synapse.artifacts.models.SparkRequest + :param submitted_at: + :type submitted_at: ~datetime.datetime + :param scheduled_at: + :type scheduled_at: ~datetime.datetime + :param ended_at: + :type ended_at: ~datetime.datetime + :param cancellation_requested_at: + :type cancellation_requested_at: ~datetime.datetime + :param current_state: Possible values include: "Queued", "Scheduled", "Ended". 
+ :type current_state: str or ~azure.synapse.artifacts.models.SchedulerCurrentState """ _attribute_map = { - 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, - 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, - 'running_at': {'key': 'runningAt', 'type': 'iso-8601'}, - 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, - 'success_at': {'key': 'successAt', 'type': 'iso-8601'}, - 'terminated_at': {'key': 'killedAt', 'type': 'iso-8601'}, - 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, + 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, + 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, + 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, 'current_state': {'key': 'currentState', 'type': 'str'}, - 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'SparkRequest'}, } def __init__( self, **kwargs ): - super(SparkBatchJobState, self).__init__(**kwargs) - self.not_started_at = kwargs.get('not_started_at', None) - self.starting_at = kwargs.get('starting_at', None) - self.running_at = kwargs.get('running_at', None) - self.dead_at = kwargs.get('dead_at', None) - self.success_at = kwargs.get('success_at', None) - self.terminated_at = kwargs.get('terminated_at', None) - self.recovering_at = kwargs.get('recovering_at', None) + super(SparkScheduler, self).__init__(**kwargs) + self.submitted_at = kwargs.get('submitted_at', None) + self.scheduled_at = kwargs.get('scheduled_at', None) + self.ended_at = kwargs.get('ended_at', None) + self.cancellation_requested_at = kwargs.get('cancellation_requested_at', None) self.current_state = kwargs.get('current_state', None) - self.job_creation_request = kwargs.get('job_creation_request', None) - -class SparkJobDefinition(msrest.serialization.Model): - """Spark job definition. - All required parameters must be populated in order to send to Azure. +class SparkServiceError(msrest.serialization.Model): + """SparkServiceError. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param description: The description of the Spark job definition. - :type description: str - :param target_big_data_pool: Required. Big data pool reference. - :type target_big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference - :param required_spark_version: The required Spark version of the application. - :type required_spark_version: str - :param language: The language of the Spark application. - :type language: str - :param job_properties: Required. The properties of the Spark job. - :type job_properties: ~azure.synapse.artifacts.models.SparkJobProperties + :param message: + :type message: str + :param error_code: + :type error_code: str + :param source: Possible values include: "System", "User", "Unknown", "Dependency". 
+ :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource """ - _validation = { - 'target_big_data_pool': {'required': True}, - 'job_properties': {'required': True}, + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'source': {'key': 'source', 'type': 'str'}, } + def __init__( + self, + **kwargs + ): + super(SparkServiceError, self).__init__(**kwargs) + self.message = kwargs.get('message', None) + self.error_code = kwargs.get('error_code', None) + self.source = kwargs.get('source', None) + + +class SparkServicePlugin(msrest.serialization.Model): + """SparkServicePlugin. + + :param preparation_started_at: + :type preparation_started_at: ~datetime.datetime + :param resource_acquisition_started_at: + :type resource_acquisition_started_at: ~datetime.datetime + :param submission_started_at: + :type submission_started_at: ~datetime.datetime + :param monitoring_started_at: + :type monitoring_started_at: ~datetime.datetime + :param cleanup_started_at: + :type cleanup_started_at: ~datetime.datetime + :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", + "Submission", "Monitoring", "Cleanup", "Ended". + :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState + """ + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'target_big_data_pool': {'key': 'targetBigDataPool', 'type': 'BigDataPoolReference'}, - 'required_spark_version': {'key': 'requiredSparkVersion', 'type': 'str'}, - 'language': {'key': 'language', 'type': 'str'}, - 'job_properties': {'key': 'jobProperties', 'type': 'SparkJobProperties'}, + 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, + 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, + 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, + 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, + 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SparkJobDefinition, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.description = kwargs.get('description', None) - self.target_big_data_pool = kwargs['target_big_data_pool'] - self.required_spark_version = kwargs.get('required_spark_version', None) - self.language = kwargs.get('language', None) - self.job_properties = kwargs['job_properties'] - + super(SparkServicePlugin, self).__init__(**kwargs) + self.preparation_started_at = kwargs.get('preparation_started_at', None) + self.resource_acquisition_started_at = kwargs.get('resource_acquisition_started_at', None) + self.submission_started_at = kwargs.get('submission_started_at', None) + self.monitoring_started_at = kwargs.get('monitoring_started_at', None) + self.cleanup_started_at = kwargs.get('cleanup_started_at', None) + self.current_state = kwargs.get('current_state', None) -class SparkJobDefinitionResource(SubResource): - """Spark job definition resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class SparkSource(TabularSource): + """A copy activity Spark Server source. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. 
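
The scheduler, error, and plugin models above are normally populated by the service rather than built by callers; a small runnable sketch with hand-filled timestamps shows how the fields line up:

    import datetime

    from azure.synapse.artifacts import models

    scheduler = models.SparkScheduler(
        submitted_at=datetime.datetime(2020, 9, 1, 12, 0, 0),
        scheduled_at=datetime.datetime(2020, 9, 1, 12, 0, 30),
        current_state="Scheduled",
    )
    # Time the job spent queued before the scheduler picked it up.
    print((scheduler.scheduled_at - scheduler.submitted_at).total_seconds())  # 30.0
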
- :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of spark job definition. - :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'SparkJobDefinition'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkJobDefinitionResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] + super(SparkSource, self).__init__(**kwargs) + self.type = 'SparkSource' # type: str + self.query = kwargs.get('query', None) -class SparkJobDefinitionsListResponse(msrest.serialization.Model): - """A list of spark job definitions resources. +class SqlConnection(msrest.serialization.Model): + """The connection used to execute the SQL script. All required parameters must be populated in order to send to Azure. - :param value: Required. List of spark job definitions. - :type value: list[~azure.synapse.artifacts.models.SparkJobDefinitionResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", + "SqlPool". + :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType + :param name: Required. The identifier of the connection. 
+ :type name: str """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'name': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[SparkJobDefinitionResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SparkJobDefinitionsListResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) + super(SqlConnection, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs['type'] + self.name = kwargs['name'] -class SparkJobProperties(msrest.serialization.Model): - """The properties of the Spark job. +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: The name of the job. - :type name: str - :param file: Required. File containing the application to execute. - :type file: str - :param class_name: Main class for Java/Scala application. - :type class_name: str - :param conf: Spark configuration properties. - :type conf: object - :param args: Command line arguments for the application. - :type args: list[str] - :param jars: Jars to be used in this job. - :type jars: list[str] - :param files: files to be used in this job. - :type files: list[str] - :param archives: Archives to be used in this job. - :type archives: list[str] - :param driver_memory: Required. Amount of memory to use for the driver process. - :type driver_memory: str - :param driver_cores: Required. Number of cores to use for the driver. - :type driver_cores: int - :param executor_memory: Required. Amount of memory to use per executor process. - :type executor_memory: str - :param executor_cores: Required. Number of cores to use for each executor. - :type executor_cores: int - :param num_executors: Required. Number of executors to launch for this job. - :type num_executors: int + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + applicable. Type: boolean (or Expression with resultType boolean). 
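
Both fields of the new SqlConnection model are required, per its _validation block; a short sketch (the pool name is a placeholder):

    from azure.synapse.artifacts import models

    connection = models.SqlConnection(
        type="SqlPool",             # or "SqlOnDemand"
        name="mydedicatedsqlpool",  # placeholder pool name
    )
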
+ :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :type poly_base_settings: ~azure.synapse.artifacts.models.PolybaseSettings + :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. + Type: boolean (or Expression with resultType boolean). + :type allow_copy_command: object + :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is + true. + :type copy_command_settings: ~azure.synapse.artifacts.models.DWCopyCommandSettings + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object """ _validation = { - 'file': {'required': True}, - 'driver_memory': {'required': True}, - 'driver_cores': {'required': True}, - 'executor_memory': {'required': True}, - 'executor_cores': {'required': True}, - 'num_executors': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, - 'conf': {'key': 'conf', 'type': 'object'}, - 'args': {'key': 'args', 'type': '[str]'}, - 'jars': {'key': 'jars', 'type': '[str]'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkJobProperties, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs.get('name', None) - self.file = kwargs['file'] - self.class_name = kwargs.get('class_name', None) - self.conf = kwargs.get('conf', None) - self.args = kwargs.get('args', None) - self.jars = kwargs.get('jars', None) - self.files = kwargs.get('files', None) - self.archives = kwargs.get('archives', None) - self.driver_memory = kwargs['driver_memory'] - self.driver_cores = kwargs['driver_cores'] - self.executor_memory = kwargs['executor_memory'] - self.executor_cores = kwargs['executor_cores'] - self.num_executors = kwargs['num_executors'] + super(SqlDWSink, self).__init__(**kwargs) + self.type = 'SqlDWSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.allow_poly_base = 
kwargs.get('allow_poly_base', None) + self.poly_base_settings = kwargs.get('poly_base_settings', None) + self.allow_copy_command = kwargs.get('allow_copy_command', None) + self.copy_command_settings = kwargs.get('copy_command_settings', None) + self.table_option = kwargs.get('table_option', None) -class SparkLinkedService(LinkedService): - """Spark Server linked service. +class SqlDWSource(TabularSource): + """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. IP address or host name of the Spark server. - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen for client - connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: "SharkServer", - "SharkServer2", "SparkThriftServer". - :type server_type: str or ~azure.synapse.artifacts.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.synapse.artifacts.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Spark - server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. 
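
A sketch of the SqlDWSink added above; only the optional behaviour toggles are set, since type is a constant filled by the server:

    from azure.synapse.artifacts import models

    dw_sink = models.SqlDWSink(
        pre_copy_script="TRUNCATE TABLE staging.Trips",  # placeholder script
        allow_copy_command=True,    # load with the COPY command instead of PolyBase
        table_option="autoCreate",  # create the sink table if it does not exist
    )
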
- :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType + object), itemType: StoredProcedureParameter. 
+ :type stored_procedure_parameters: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkLinkedService, self).__init__(**kwargs) - self.type = 'Spark' - self.host = kwargs['host'] - self.port = kwargs['port'] - self.server_type = kwargs.get('server_type', None) - self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) - self.authentication_type = kwargs['authentication_type'] - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.http_path = kwargs.get('http_path', None) - self.enable_ssl = kwargs.get('enable_ssl', None) - self.trusted_cert_path = kwargs.get('trusted_cert_path', None) - self.use_system_trust_store = kwargs.get('use_system_trust_store', None) - self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) - self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(SqlDWSource, self).__init__(**kwargs) + self.type = 'SqlDWSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) -class 
SparkObjectDataset(Dataset): - """Spark Server dataset. +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Spark. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. 
+ :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkObjectDataset, self).__init__(**kwargs) - self.type = 'SparkObject' - self.table_name = kwargs.get('table_name', None) - self.table = kwargs.get('table', None) - self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + super(SqlMISink, self).__init__(**kwargs) + self.type = 'SqlMISink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) -class SparkRequest(msrest.serialization.Model): - """SparkRequest. +class SqlMISource(TabularSource): + """A copy activity Azure SQL Managed Instance source. 
- :param name: - :type name: str - :param file: - :type file: str - :param class_name: - :type class_name: str - :param arguments: - :type arguments: list[str] - :param jars: - :type jars: list[str] - :param python_files: - :type python_files: list[str] - :param files: - :type files: list[str] - :param archives: - :type archives: list[str] - :param configuration: Dictionary of :code:``. - :type configuration: dict[str, str] - :param driver_memory: - :type driver_memory: str - :param driver_cores: - :type driver_cores: int - :param executor_memory: - :type executor_memory: str - :param executor_cores: - :type executor_cores: int - :param executor_count: - :type executor_count: int + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for an Azure SQL Managed + Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or + Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce.
+ :type produce_additional_types: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, - 'arguments': {'key': 'args', 'type': '[str]'}, - 'jars': {'key': 'jars', 'type': '[str]'}, - 'python_files': {'key': 'pyFiles', 'type': '[str]'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'configuration': {'key': 'conf', 'type': '{str}'}, - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'executor_count': {'key': 'numExecutors', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SparkRequest, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.file = kwargs.get('file', None) - self.class_name = kwargs.get('class_name', None) - self.arguments = kwargs.get('arguments', None) - self.jars = kwargs.get('jars', None) - self.python_files = kwargs.get('python_files', None) - self.files = kwargs.get('files', None) - self.archives = kwargs.get('archives', None) - self.configuration = kwargs.get('configuration', None) - self.driver_memory = kwargs.get('driver_memory', None) - self.driver_cores = kwargs.get('driver_cores', None) - self.executor_memory = kwargs.get('executor_memory', None) - self.executor_cores = kwargs.get('executor_cores', None) - self.executor_count = kwargs.get('executor_count', None) + super(SqlMISource, self).__init__(**kwargs) + self.type = 'SqlMISource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) -class SparkScheduler(msrest.serialization.Model): - """SparkScheduler. +class SqlPool(TrackedResource): + """A SQL Analytics pool. - :param submitted_at: - :type submitted_at: ~datetime.datetime - :param scheduled_at: - :type scheduled_at: ~datetime.datetime - :param ended_at: - :type ended_at: ~datetime.datetime - :param cancellation_requested_at: - :type cancellation_requested_at: ~datetime.datetime - :param current_state: Possible values include: "Queued", "Scheduled", "Ended". - :type current_state: str or ~azure.synapse.artifacts.models.SchedulerCurrentState + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param sku: SQL pool SKU. + :type sku: ~azure.synapse.artifacts.models.Sku + :param max_size_bytes: Maximum size in bytes. + :type max_size_bytes: long + :param collation: Collation mode. + :type collation: str + :param source_database_id: Source database to create from. + :type source_database_id: str + :param recoverable_database_id: Backup database to restore from. + :type recoverable_database_id: str + :param provisioning_state: Resource state. + :type provisioning_state: str + :param status: Resource status. + :type status: str + :param restore_point_in_time: Snapshot time to restore. + :type restore_point_in_time: ~datetime.datetime + :param create_mode: Specifies the mode of SQL pool creation. + :type create_mode: str + :param creation_date: Date the SQL pool was created. + :type creation_date: ~datetime.datetime """ + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + _attribute_map = { - 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, - 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, - 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, - 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'max_size_bytes': {'key': 'properties.maxSizeBytes', 'type': 'long'}, + 'collation': {'key': 'properties.collation', 'type': 'str'}, + 'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'}, + 'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'}, + 'create_mode': {'key': 'properties.createMode', 'type': 'str'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, } def __init__( self, **kwargs ): - super(SparkScheduler, self).__init__(**kwargs) - self.submitted_at = kwargs.get('submitted_at', None) - self.scheduled_at = kwargs.get('scheduled_at', None) - self.ended_at = kwargs.get('ended_at', None) - self.cancellation_requested_at = kwargs.get('cancellation_requested_at', None) - self.current_state = kwargs.get('current_state', None) + super(SqlPool, self).__init__(**kwargs) + self.sku = kwargs.get('sku', None) + self.max_size_bytes = kwargs.get('max_size_bytes', None) + self.collation = kwargs.get('collation', None) + self.source_database_id = kwargs.get('source_database_id', None) + self.recoverable_database_id = kwargs.get('recoverable_database_id', None) + self.provisioning_state =
kwargs.get('provisioning_state', None) + self.status = kwargs.get('status', None) + self.restore_point_in_time = kwargs.get('restore_point_in_time', None) + self.create_mode = kwargs.get('create_mode', None) + self.creation_date = kwargs.get('creation_date', None) -class SparkServiceError(msrest.serialization.Model): - """SparkServiceError. +class SqlPoolInfoListResult(msrest.serialization.Model): + """List of SQL pools. - :param message: - :type message: str - :param error_code: - :type error_code: str - :param source: Possible values include: "System", "User", "Unknown", "Dependency". - :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of SQL pools. + :type value: list[~azure.synapse.artifacts.models.SqlPool] """ _attribute_map = { - 'message': {'key': 'message', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[SqlPool]'}, } def __init__( self, **kwargs ): - super(SparkServiceError, self).__init__(**kwargs) - self.message = kwargs.get('message', None) - self.error_code = kwargs.get('error_code', None) - self.source = kwargs.get('source', None) + super(SqlPoolInfoListResult, self).__init__(**kwargs) + self.next_link = kwargs.get('next_link', None) + self.value = kwargs.get('value', None) -class SparkServicePlugin(msrest.serialization.Model): - """SparkServicePlugin. +class SqlPoolReference(msrest.serialization.Model): + """SQL pool reference type. - :param preparation_started_at: - :type preparation_started_at: ~datetime.datetime - :param resource_acquisition_started_at: - :type resource_acquisition_started_at: ~datetime.datetime - :param submission_started_at: - :type submission_started_at: ~datetime.datetime - :param monitoring_started_at: - :type monitoring_started_at: ~datetime.datetime - :param cleanup_started_at: - :type cleanup_started_at: ~datetime.datetime - :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", - "Submission", "Monitoring", "Cleanup", "Ended". - :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState + All required parameters must be populated in order to send to Azure. + + :param type: Required. SQL pool reference type. Possible values include: "SqlPoolReference". + :type type: str or ~azure.synapse.artifacts.models.SqlPoolReferenceType + :param reference_name: Required. Reference SQL pool name. 
+ :type reference_name: str """ + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + _attribute_map = { - 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, - 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, - 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, - 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, - 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, } def __init__( self, **kwargs ): - super(SparkServicePlugin, self).__init__(**kwargs) - self.preparation_started_at = kwargs.get('preparation_started_at', None) - self.resource_acquisition_started_at = kwargs.get('resource_acquisition_started_at', None) - self.submission_started_at = kwargs.get('submission_started_at', None) - self.monitoring_started_at = kwargs.get('monitoring_started_at', None) - self.cleanup_started_at = kwargs.get('cleanup_started_at', None) - self.current_state = kwargs.get('current_state', None) + super(SqlPoolReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] -class SqlConnection(msrest.serialization.Model): - """The connection used to execute the SQL script. +class SqlPoolStoredProcedureActivity(Activity): + """Execute SQL pool stored procedure activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", - "SqlPool". - :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType - :param name: Required. The identifier of the connection. + :param name: Required. Activity name. :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param sql_pool: Required. SQL pool stored procedure reference. + :type sql_pool: ~azure.synapse.artifacts.models.SqlPoolReference + :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with + resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". 
+ :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] """ _validation = { - 'type': {'required': True}, 'name': {'required': True}, + 'type': {'required': True}, + 'sql_pool': {'required': True}, + 'stored_procedure_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'sql_pool': {'key': 'sqlPool', 'type': 'SqlPoolReference'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } def __init__( self, **kwargs ): - super(SqlConnection, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs['type'] - self.name = kwargs['name'] + super(SqlPoolStoredProcedureActivity, self).__init__(**kwargs) + self.type = 'SqlPoolStoredProcedure' # type: str + self.sql_pool = kwargs['sql_pool'] + self.stored_procedure_name = kwargs['stored_procedure_name'] + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) class SqlScript(msrest.serialization.Model): """SQL script. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -17553,14 +27679,13 @@ class SqlScript(msrest.serialization.Model): :type additional_properties: dict[str, object] :param description: The description of the SQL script. :type description: str - :ivar type: The type of the SQL script. Default value: "SqlQuery". - :vartype type: str + :param type: The type of the SQL script. Possible values include: "SqlQuery". + :type type: str or ~azure.synapse.artifacts.models.SqlScriptType :param content: Required. The content of the SQL script. :type content: ~azure.synapse.artifacts.models.SqlScriptContent """ _validation = { - 'type': {'constant': True}, 'content': {'required': True}, } @@ -17571,8 +27696,6 @@ class SqlScript(msrest.serialization.Model): 'content': {'key': 'content', 'type': 'SqlScriptContent'}, } - type = "SqlQuery" - def __init__( self, **kwargs @@ -17580,6 +27703,7 @@ def __init__( super(SqlScript, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.description = kwargs.get('description', None) + self.type = kwargs.get('type', None) self.content = kwargs['content'] @@ -17646,20 +27770,22 @@ def __init__( self.language = kwargs.get('language', None) -class SqlScriptResource(SubResource): +class SqlScriptResource(AzureEntityResource): """Sql Script resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. 
+ :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of sql script. :type properties: ~azure.synapse.artifacts.models.SqlScript @@ -17773,13 +27899,157 @@ def __init__( **kwargs ): super(SqlServerLinkedService, self).__init__(**kwargs) - self.type = 'SqlServer' + self.type = 'SqlServer' # type: str self.connection_string = kwargs['connection_string'] self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlServerSink, self).__init__(**kwargs) + self.type = 'SqlServerSink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + + +class SqlServerSource(TabularSource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". 
+ :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlServerSource, self).__init__(**kwargs) + self.type = 'SqlServerSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + + class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. @@ -17835,7 +28105,7 @@ def __init__( **kwargs ): super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) - self.type = 'SqlServerStoredProcedure' + self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = kwargs['stored_procedure_name'] self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) @@ -17903,12 +28173,152 @@ def __init__( **kwargs ): super(SqlServerTableDataset, self).__init__(**kwargs) - self.type = 'SqlServerTable' + self.type = 'SqlServerTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlSink, self).__init__(**kwargs) + self.type = 'SqlSink' # type: str + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) + + +class SqlSource(TabularSource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlSource, self).__init__(**kwargs) + self.type = 'SqlSource' # type: str + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + + class SquareLinkedService(LinkedService): """Square Service linked service. @@ -17981,7 +28391,7 @@ def __init__( **kwargs ): super(SquareLinkedService, self).__init__(**kwargs) - self.type = 'Square' + self.type = 'Square' # type: str self.host = kwargs['host'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -18045,9 +28455,59 @@ def __init__( self, **kwargs ): - super(SquareObjectDataset, self).__init__(**kwargs) - self.type = 'SquareObject' - self.table_name = kwargs.get('table_name', None) + super(SquareObjectDataset, self).__init__(**kwargs) + self.type = 'SquareObject' # type: str + self.table_name = kwargs.get('table_name', None) + + +class SquareSource(TabularSource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SquareSource, self).__init__(**kwargs) + self.type = 'SquareSource' # type: str + self.query = kwargs.get('query', None) class SSISAccessCredential(msrest.serialization.Model): @@ -18189,15 +28649,13 @@ def __init__( class SSISLogLocation(msrest.serialization.Model): """SSIS package execution log location. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File". - :vartype type: str + :param type: Required. The type of SSIS log location. Possible values include: "File". + :type type: str or ~azure.synapse.artifacts.models.SsisLogLocationType :param access_credential: The package execution log access credential. :type access_credential: ~azure.synapse.artifacts.models.SSISAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 @@ -18208,7 +28666,7 @@ class SSISLogLocation(msrest.serialization.Model): _validation = { 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, } _attribute_map = { @@ -18218,14 +28676,13 @@ class SSISLogLocation(msrest.serialization.Model): 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, } - type = "File" - def __init__( self, **kwargs ): super(SSISLogLocation, self).__init__(**kwargs) self.log_path = kwargs['log_path'] + self.type = kwargs['type'] self.access_credential = kwargs.get('access_credential', None) self.log_refresh_interval = kwargs.get('log_refresh_interval', None) @@ -18474,6 +28931,44 @@ def __init__( self.type = kwargs.get('type', None) +class SubResource(AzureEntityResource): + """Azure Synapse nested resource, which belongs to a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
+ :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + + class SwitchActivity(Activity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. @@ -18527,7 +29022,7 @@ def __init__( **kwargs ): super(SwitchActivity, self).__init__(**kwargs) - self.type = 'Switch' + self.type = 'Switch' # type: str self.on = kwargs['on'] self.cases = kwargs.get('cases', None) self.default_activities = kwargs.get('default_activities', None) @@ -18623,7 +29118,7 @@ def __init__( **kwargs ): super(SybaseLinkedService, self).__init__(**kwargs) - self.type = 'Sybase' + self.type = 'Sybase' # type: str self.server = kwargs['server'] self.database = kwargs['database'] self.schema = kwargs.get('schema', None) @@ -18633,62 +29128,331 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class SybaseTableDataset(Dataset): - """The Sybase table dataset. +class SybaseSource(TabularSource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SybaseSource, self).__init__(**kwargs) + self.type = 'SybaseSource' # type: str + self.query = kwargs.get('query', None) + + +class SybaseTableDataset(Dataset): + """The Sybase table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The Sybase table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SybaseTableDataset, self).__init__(**kwargs) + self.type = 'SybaseTable' # type: str + self.table_name = kwargs.get('table_name', None) + + +class SynapseNotebookActivity(Activity): + """Execute Synapse notebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. 
+ :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param notebook: Required. Synapse notebook reference. + :type notebook: ~azure.synapse.artifacts.models.SynapseNotebookReference + :param parameters: Notebook parameters. + :type parameters: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'notebook': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'notebook': {'key': 'typeProperties.notebook', 'type': 'SynapseNotebookReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseNotebookActivity, self).__init__(**kwargs) + self.type = 'SynapseNotebook' # type: str + self.notebook = kwargs['notebook'] + self.parameters = kwargs.get('parameters', None) + + +class SynapseNotebookReference(msrest.serialization.Model): + """Synapse notebook reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Synapse notebook reference type. Possible values include: + "NotebookReference". + :type type: str or ~azure.synapse.artifacts.models.NotebookReferenceType + :param reference_name: Required. Reference notebook name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseNotebookReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + + +class SynapseSparkJobDefinitionActivity(Activity): + """Execute spark job activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param spark_job: Required. Synapse spark job reference. 
+ :type spark_job: ~azure.synapse.artifacts.models.SynapseSparkJobReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'spark_job': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'spark_job': {'key': 'typeProperties.sparkJob', 'type': 'SynapseSparkJobReference'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseSparkJobDefinitionActivity, self).__init__(**kwargs) + self.type = 'SparkJob' # type: str + self.spark_job = kwargs['spark_job'] + + +class SynapseSparkJobReference(msrest.serialization.Model): + """Synapse spark job reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Synapse spark job reference type. Possible values include: + "SparkJobDefinitionReference". + :type type: str or ~azure.synapse.artifacts.models.SparkJobReferenceType + :param reference_name: Required. Reference spark job name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseSparkJobReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + + +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy translator type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Sybase table name. Type: string (or Expression with resultType string). - :type table_name: object + :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + MyName" Type: string (or Expression with resultType string). This property will be retired. + Please use mappings property. 
+ :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. + Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": + "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will + be retired. Please use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. + Type: object (or Expression with resultType object). + :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and object) values to simple + strings in json format. Type: boolean (or Expression with resultType boolean). + :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. Tabular->tabular example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, } def __init__( self, **kwargs ): - super(SybaseTableDataset, self).__init__(**kwargs) - self.type = 'SybaseTable' - self.table_name = kwargs.get('table_name', None) + super(TabularTranslator, self).__init__(**kwargs) + self.type = 'TabularTranslator' # type: str + self.column_mappings = kwargs.get('column_mappings', None) + self.schema_mapping = kwargs.get('schema_mapping', None) + self.collection_reference = kwargs.get('collection_reference', None) + self.map_complex_values_to_string = kwargs.get('map_complex_values_to_string', None) + self.mappings = kwargs.get('mappings', None) class TeradataLinkedService(LinkedService): @@ -18752,7 +29516,7 @@ def __init__( **kwargs ): super(TeradataLinkedService, self).__init__(**kwargs) - self.type = 'Teradata' + self.type = 'Teradata' # type: str self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -18761,6 +29525,97 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) 
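# --- Editor's example (not part of the generated diff) ---------------------------
# A minimal usage sketch of the kwargs-based construction pattern these generated
# models follow: optional fields fall back to None via kwargs.get(...), while
# required fields are read with kwargs['...'] and therefore raise KeyError when
# omitted. The connection values below are placeholders, not working credentials.
from azure.synapse.artifacts.models import TeradataLinkedService

linked_service = TeradataLinkedService(
    connection_string='<teradata-connection-string>',  # placeholder
    username='loader',                                 # placeholder
)
# The discriminator is fixed by the subclass itself, never by the caller:
assert linked_service.type == 'Teradata'
# ----------------------------------------------------------------------------------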
+class TeradataPartitionSettings(msrest.serialization.Model):
+    """The settings that will be leveraged for Teradata source partitioning.
+
+    :param partition_column_name: The name of the column that will be used for range or
+     hash partitioning. Type: string (or Expression with resultType string).
+    :type partition_column_name: object
+    :param partition_upper_bound: The maximum value of column specified in partitionColumnName that
+     will be used for range partitioning. Type: string (or Expression with resultType
+     string).
+    :type partition_upper_bound: object
+    :param partition_lower_bound: The minimum value of column specified in partitionColumnName that
+     will be used for range partitioning. Type: string (or Expression with resultType
+     string).
+    :type partition_lower_bound: object
+    """
+
+    _attribute_map = {
+        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(TeradataPartitionSettings, self).__init__(**kwargs)
+        self.partition_column_name = kwargs.get('partition_column_name', None)
+        self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+        self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
+
+
+class TeradataSource(TabularSource):
+    """A copy activity Teradata source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type.Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: Teradata query. Type: string (or Expression with resultType string).
+    :type query: object
+    :param partition_option: The partition mechanism that will be used for Teradata read in
+     parallel. Possible values include: "None", "Hash", "DynamicRange".
+    :type partition_option: str or ~azure.synapse.artifacts.models.TeradataPartitionOption
+    :param partition_settings: The settings that will be leveraged for Teradata source
+     partitioning.
+    :type partition_settings: ~azure.synapse.artifacts.models.TeradataPartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'str'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(TeradataSource, self).__init__(**kwargs)
+        self.type = 'TeradataSource'  # type: str
+        self.query = kwargs.get('query', None)
+        self.partition_option = kwargs.get('partition_option', None)
+        self.partition_settings = kwargs.get('partition_settings', None)
+
+
 class TeradataTableDataset(Dataset):
     """The Teradata database dataset.

@@ -18819,11 +29674,91 @@ def __init__(
         **kwargs
     ):
         super(TeradataTableDataset, self).__init__(**kwargs)
-        self.type = 'TeradataTable'
+        self.type = 'TeradataTable'  # type: str
         self.database = kwargs.get('database', None)
         self.table = kwargs.get('table', None)


+class TextFormat(DatasetStorageFormat):
+    """The data stored in text format.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage format.Constant filled by server.
+    :type type: str
+    :param serializer: Serializer. Type: string (or Expression with resultType string).
+    :type serializer: object
+    :param deserializer: Deserializer. Type: string (or Expression with resultType string).
+    :type deserializer: object
+    :param column_delimiter: The column delimiter. Type: string (or Expression with resultType
+     string).
+    :type column_delimiter: object
+    :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string).
+    :type row_delimiter: object
+    :param escape_char: The escape character. Type: string (or Expression with resultType string).
+    :type escape_char: object
+    :param quote_char: The quote character. Type: string (or Expression with resultType string).
+    :type quote_char: object
+    :param null_value: The null value string. Type: string (or Expression with resultType string).
+    :type null_value: object
+    :param encoding_name: The code page name of the preferred encoding. If missing, the default
+     value is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of
+     the table in the following link to set supported values:
+     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
+     resultType string).
+    :type encoding_name: object
+    :param treat_empty_as_null: Treat empty column values in the text file as null. The default
+     value is true. Type: boolean (or Expression with resultType boolean).
+    :type treat_empty_as_null: object
+    :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The
+     default value is 0. Type: integer (or Expression with resultType integer).
+    :type skip_line_count: object
+    :param first_row_as_header: When used as input, treat the first row of data as headers. When
+     used as output, write the headers into the output as the first row of data. The default value
+     is false. Type: boolean (or Expression with resultType boolean).
+    :type first_row_as_header: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'serializer': {'key': 'serializer', 'type': 'object'},
+        'deserializer': {'key': 'deserializer', 'type': 'object'},
+        'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
+        'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
+        'escape_char': {'key': 'escapeChar', 'type': 'object'},
+        'quote_char': {'key': 'quoteChar', 'type': 'object'},
+        'null_value': {'key': 'nullValue', 'type': 'object'},
+        'encoding_name': {'key': 'encodingName', 'type': 'object'},
+        'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
+        'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
+        'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(TextFormat, self).__init__(**kwargs)
+        self.type = 'TextFormat'  # type: str
+        self.column_delimiter = kwargs.get('column_delimiter', None)
+        self.row_delimiter = kwargs.get('row_delimiter', None)
+        self.escape_char = kwargs.get('escape_char', None)
+        self.quote_char = kwargs.get('quote_char', None)
+        self.null_value = kwargs.get('null_value', None)
+        self.encoding_name = kwargs.get('encoding_name', None)
+        self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None)
+        self.skip_line_count = kwargs.get('skip_line_count', None)
+        self.first_row_as_header = kwargs.get('first_row_as_header', None)
+
+
 class TriggerDependencyProvisioningStatus(msrest.serialization.Model):
     """Defines the response of a provision trigger dependency operation.

@@ -18854,6 +29789,43 @@ def __init__(
         self.provisioning_status = kwargs['provisioning_status']


+class TriggerDependencyReference(DependencyReference):
+    """Trigger referenced dependency.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: TumblingWindowTriggerDependencyReference.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. The type of dependency reference.Constant filled by server.
+    :type type: str
+    :param reference_trigger: Required. Referenced trigger.
+    :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'reference_trigger': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'},
+    }
+
+    _subtype_map = {
+        'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(TriggerDependencyReference, self).__init__(**kwargs)
+        self.type = 'TriggerDependencyReference'  # type: str
+        self.reference_trigger = kwargs['reference_trigger']
+
+
 class TriggerListResponse(msrest.serialization.Model):
     """A list of trigger resources.

@@ -18906,20 +29878,52 @@ def __init__(
         self.parameters = kwargs.get('parameters', None)


-class TriggerResource(SubResource):
+class TriggerReference(msrest.serialization.Model):
+    """Trigger reference type.
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. Trigger reference type. Possible values include: "TriggerReference". + :type type: str or ~azure.synapse.artifacts.models.TriggerReferenceType + :param reference_name: Required. Reference trigger name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TriggerReference, self).__init__(**kwargs) + self.type = kwargs['type'] + self.reference_name = kwargs['reference_name'] + + +class TriggerResource(AzureEntityResource): """Trigger resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of the trigger. :type properties: ~azure.synapse.artifacts.models.Trigger @@ -19010,70 +30014,203 @@ def __init__( self.trigger_type = None self.trigger_run_timestamp = None self.status = None - self.message = None - self.properties = None - self.triggered_pipelines = None + self.message = None + self.properties = None + self.triggered_pipelines = None + + +class TriggerRunsQueryResponse(msrest.serialization.Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.synapse.artifacts.models.TriggerRun] + :param continuation_token: The continuation token for getting the next page of results, if any + remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.continuation_token = kwargs.get('continuation_token', None) + + +class TriggerSubscriptionOperationStatus(msrest.serialization.Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", + "Deprovisioning", "Disabled", "Unknown". 
+ :vartype status: str or ~azure.synapse.artifacts.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None -class TriggerRunsQueryResponse(msrest.serialization.Model): - """A list of trigger runs. +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param value: Required. List of trigger runs. - :type value: list[~azure.synapse.artifacts.models.TriggerRun] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour". + :type frequency: str or ~azure.synapse.artifacts.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: The end time for the time period for the trigger during which events are fired + for windows that are ready. Only UTC time is currently supported. + :type end_time: ~datetime.datetime + :param delay: Specifies how long the trigger waits past due time before triggering new run. It + doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline runs. + :type retry_policy: ~azure.synapse.artifacts.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. 
Only tumbling window triggers are + supported. + :type depends_on: list[~azure.synapse.artifacts.models.DependencyReference] """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[TriggerRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } def __init__( self, **kwargs ): - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = kwargs['value'] - self.continuation_token = kwargs.get('continuation_token', None) + super(TumblingWindowTrigger, self).__init__(**kwargs) + self.type = 'TumblingWindowTrigger' # type: str + self.pipeline = kwargs['pipeline'] + self.frequency = kwargs['frequency'] + self.interval = kwargs['interval'] + self.start_time = kwargs['start_time'] + self.end_time = kwargs.get('end_time', None) + self.delay = kwargs.get('delay', None) + self.max_concurrency = kwargs['max_concurrency'] + self.retry_policy = kwargs.get('retry_policy', None) + self.depends_on = kwargs.get('depends_on', None) -class TriggerSubscriptionOperationStatus(msrest.serialization.Model): - """Defines the response of a trigger subscription operation. +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", - "Deprovisioning", "Disabled", "Unknown". - :vartype status: str or ~azure.synapse.artifacts.models.EventSubscriptionStatus + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. 
+ :type size: str """ _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, } def __init__( self, **kwargs ): - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) - self.trigger_name = None - self.status = None + super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) + self.type = 'TumblingWindowTriggerDependencyReference' # type: str + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) class UntilActivity(Activity): @@ -19131,7 +30268,7 @@ def __init__( **kwargs ): super(UntilActivity, self).__init__(**kwargs) - self.type = 'Until' + self.type = 'Until' # type: str self.expression = kwargs['expression'] self.timeout = kwargs.get('timeout', None) self.activities = kwargs['activities'] @@ -19230,7 +30367,7 @@ def __init__( **kwargs ): super(ValidationActivity, self).__init__(**kwargs) - self.type = 'Validation' + self.type = 'Validation' # type: str self.timeout = kwargs.get('timeout', None) self.sleep = kwargs.get('sleep', None) self.minimum_size = kwargs.get('minimum_size', None) @@ -19318,12 +30455,62 @@ def __init__( **kwargs ): super(VerticaLinkedService, self).__init__(**kwargs) - self.type = 'Vertica' + self.type = 'Vertica' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) +class VerticaSource(TabularSource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(VerticaSource, self).__init__(**kwargs) + self.type = 'VerticaSource' # type: str + self.query = kwargs.get('query', None) + + class VerticaTableDataset(Dataset): """Vertica dataset. @@ -19387,12 +30574,31 @@ def __init__( **kwargs ): super(VerticaTableDataset, self).__init__(**kwargs) - self.type = 'VerticaTable' + self.type = 'VerticaTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) +class VirtualNetworkProfile(msrest.serialization.Model): + """Virtual Network Profile. + + :param compute_subnet_id: Subnet ID used for computes in workspace. + :type compute_subnet_id: str + """ + + _attribute_map = { + 'compute_subnet_id': {'key': 'computeSubnetId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(VirtualNetworkProfile, self).__init__(**kwargs) + self.compute_subnet_id = kwargs.get('compute_subnet_id', None) + + class WaitActivity(Activity): """This activity suspends pipeline execution for the specified interval. @@ -19436,7 +30642,7 @@ def __init__( **kwargs ): super(WaitActivity, self).__init__(**kwargs) - self.type = 'Wait' + self.type = 'Wait' # type: str self.wait_time_in_seconds = kwargs['wait_time_in_seconds'] @@ -19516,7 +30722,7 @@ def __init__( **kwargs ): super(WebActivity, self).__init__(**kwargs) - self.type = 'WebActivity' + self.type = 'WebActivity' # type: str self.method = kwargs['method'] self.url = kwargs['url'] self.headers = kwargs.get('headers', None) @@ -19606,7 +30812,7 @@ def __init__( ): super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = kwargs['url'] - self.authentication_type = None + self.authentication_type = None # type: Optional[str] class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): @@ -19638,7 +30844,7 @@ def __init__( **kwargs ): super(WebAnonymousAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Anonymous' + self.authentication_type = 'Anonymous' # type: str class WebBasicAuthentication(WebLinkedServiceTypeProperties): @@ -19679,7 +30885,7 @@ def __init__( **kwargs ): super(WebBasicAuthentication, self).__init__(**kwargs) - self.authentication_type = 'Basic' + self.authentication_type = 'Basic' # type: str self.username = kwargs['username'] self.password = kwargs['password'] @@ -19721,7 +30927,7 @@ def __init__( **kwargs ): super(WebClientCertificateAuthentication, self).__init__(**kwargs) - self.authentication_type = 'ClientCertificate' + self.authentication_type = 'ClientCertificate' # type: str self.pfx = kwargs['pfx'] self.password = kwargs['password'] @@ -19729,8 +30935,6 @@ def __init__( class WebHookActivity(Activity): """WebHook activity. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this @@ -19746,8 +30950,8 @@ class WebHookActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :ivar method: Required. Rest API method for target endpoint. Default value: "POST". - :vartype method: str + :param method: Required. Rest API method for target endpoint. Possible values include: "POST". + :type method: str or ~azure.synapse.artifacts.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object @@ -19774,7 +30978,7 @@ class WebHookActivity(Activity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'method': {'required': True, 'constant': True}, + 'method': {'required': True}, 'url': {'required': True}, } @@ -19794,14 +30998,13 @@ class WebHookActivity(Activity): 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, } - method = "POST" - def __init__( self, **kwargs ): super(WebHookActivity, self).__init__(**kwargs) - self.type = 'WebHook' + self.type = 'WebHook' # type: str + self.method = kwargs['method'] self.url = kwargs['url'] self.timeout = kwargs.get('timeout', None) self.headers = kwargs.get('headers', None) @@ -19852,10 +31055,51 @@ def __init__( **kwargs ): super(WebLinkedService, self).__init__(**kwargs) - self.type = 'Web' + self.type = 'Web' # type: str self.type_properties = kwargs['type_properties'] +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(WebSource, self).__init__(**kwargs) + self.type = 'WebSource' # type: str + + class WebTableDataset(Dataset): """The dataset points to a HTML table in the web page. @@ -19916,69 +31160,85 @@ def __init__( **kwargs ): super(WebTableDataset, self).__init__(**kwargs) - self.type = 'WebTable' + self.type = 'WebTable' # type: str self.index = kwargs['index'] self.path = kwargs.get('path', None) -class Workspace(Resource): - """Workspace resource type. 
+class Workspace(TrackedResource):
+    """A workspace.

     Variables are only populated by the server, and will be ignored when sending a request.

-    :ivar id: The resource identifier.
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource Id for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
     :vartype id: str
-    :ivar name: The resource name.
+    :ivar name: The name of the resource.
     :vartype name: str
-    :ivar type: The resource type.
+    :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
+     Microsoft.Storage/storageAccounts.
     :vartype type: str
-    :param location: The resource location.
-    :type location: str
-    :param tags: A set of tags. The resource tags.
+    :param tags: A set of tags. Resource tags.
     :type tags: dict[str, str]
-    :ivar e_tag: Etag identifies change in the resource.
-    :vartype e_tag: str
-    :param additional_properties: Unmatched properties from the message are deserialized to this
-     collection.
-    :type additional_properties: dict[str, object]
-    :param identity: Managed service identity of the workspace.
-    :type identity: ~azure.synapse.artifacts.models.WorkspaceIdentity
-    :ivar provisioning_state: Workspace provisioning state, example Succeeded.
+    :param location: Required. The geo-location where the resource lives.
+    :type location: str
+    :param identity: Identity of the workspace.
+    :type identity: ~azure.synapse.artifacts.models.ManagedIdentity
+    :param default_data_lake_storage: Workspace default data lake storage account details.
+    :type default_data_lake_storage: ~azure.synapse.artifacts.models.DataLakeStorageAccountDetails
+    :param sql_administrator_login_password: SQL administrator login password.
+    :type sql_administrator_login_password: str
+    :param managed_resource_group_name: Workspace managed resource group. The resource group name
+     uniquely identifies the resource group within the user subscriptionId. The resource group name
+     must be no longer than 90 characters, and must be alphanumeric characters
+     (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and '.'. Note that the name cannot end with
+     '.'.
+    :type managed_resource_group_name: str
+    :ivar provisioning_state: Resource provisioning state.
     :vartype provisioning_state: str
-    :ivar create_time: Time the workspace was created in ISO8601 format.
-    :vartype create_time: ~datetime.datetime
-    :ivar version: Version of the workspace.
-    :vartype version: str
-    :param default_storage: Linked service reference.
-    :type default_storage: ~azure.synapse.artifacts.models.LinkedServiceReference
-    :param default_sql_server: Linked service reference.
-    :type default_sql_server: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param sql_administrator_login: Login for workspace SQL active directory administrator.
+    :type sql_administrator_login: str
+    :param virtual_network_profile: Virtual Network profile.
+    :type virtual_network_profile: ~azure.synapse.artifacts.models.VirtualNetworkProfile
+    :param connectivity_endpoints: Connectivity endpoints.
+    :type connectivity_endpoints: dict[str, str]
+    :param managed_virtual_network: Setting this to 'default' will ensure that all compute for this
+     workspace is in a virtual network managed on behalf of the user.
+    :type managed_virtual_network: str
+    :param private_endpoint_connections: Private endpoint connections to the workspace.
+ :type private_endpoint_connections: + list[~azure.synapse.artifacts.models.PrivateEndpointConnection] + :ivar extra_properties: Workspace level configs and feature flags. + :vartype extra_properties: dict[str, object] """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'location': {'required': True}, 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, + 'extra_properties': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'WorkspaceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, + 'default_data_lake_storage': {'key': 'properties.defaultDataLakeStorage', 'type': 'DataLakeStorageAccountDetails'}, + 'sql_administrator_login_password': {'key': 'properties.sqlAdministratorLoginPassword', 'type': 'str'}, + 'managed_resource_group_name': {'key': 'properties.managedResourceGroupName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'default_storage': {'key': 'properties.defaultStorage', 'type': 'LinkedServiceReference'}, - 'default_sql_server': {'key': 'properties.defaultSqlServer', 'type': 'LinkedServiceReference'}, + 'sql_administrator_login': {'key': 'properties.sqlAdministratorLogin', 'type': 'str'}, + 'virtual_network_profile': {'key': 'properties.virtualNetworkProfile', 'type': 'VirtualNetworkProfile'}, + 'connectivity_endpoints': {'key': 'properties.connectivityEndpoints', 'type': '{str}'}, + 'managed_virtual_network': {'key': 'properties.managedVirtualNetwork', 'type': 'str'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'extra_properties': {'key': 'properties.extraProperties', 'type': '{object}'}, } def __init__( @@ -19986,13 +31246,17 @@ def __init__( **kwargs ): super(Workspace, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) self.identity = kwargs.get('identity', None) + self.default_data_lake_storage = kwargs.get('default_data_lake_storage', None) + self.sql_administrator_login_password = kwargs.get('sql_administrator_login_password', None) + self.managed_resource_group_name = kwargs.get('managed_resource_group_name', None) self.provisioning_state = None - self.create_time = None - self.version = None - self.default_storage = kwargs.get('default_storage', None) - self.default_sql_server = kwargs.get('default_sql_server', None) + self.sql_administrator_login = kwargs.get('sql_administrator_login', None) + self.virtual_network_profile = kwargs.get('virtual_network_profile', None) + self.connectivity_endpoints = kwargs.get('connectivity_endpoints', None) + self.managed_virtual_network = kwargs.get('managed_virtual_network', None) + self.private_endpoint_connections = kwargs.get('private_endpoint_connections', None) + self.extra_properties = None class WorkspaceIdentity(msrest.serialization.Model): @@ -20125,7 +31389,7 @@ def 
__init__( **kwargs ): super(XeroLinkedService, self).__init__(**kwargs) - self.type = 'Xero' + self.type = 'Xero' # type: str self.host = kwargs['host'] self.consumer_key = kwargs.get('consumer_key', None) self.private_key = kwargs.get('private_key', None) @@ -20189,10 +31453,60 @@ def __init__( **kwargs ): super(XeroObjectDataset, self).__init__(**kwargs) - self.type = 'XeroObject' + self.type = 'XeroObject' # type: str self.table_name = kwargs.get('table_name', None) +class XeroSource(TabularSource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(XeroSource, self).__init__(**kwargs) + self.type = 'XeroSource' # type: str + self.query = kwargs.get('query', None) + + class ZohoLinkedService(LinkedService): """Zoho server linked service. @@ -20256,7 +31570,7 @@ def __init__( **kwargs ): super(ZohoLinkedService, self).__init__(**kwargs) - self.type = 'Zoho' + self.type = 'Zoho' # type: str self.endpoint = kwargs['endpoint'] self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -20319,5 +31633,55 @@ def __init__( **kwargs ): super(ZohoObjectDataset, self).__init__(**kwargs) - self.type = 'ZohoObject' + self.type = 'ZohoObject' # type: str self.table_name = kwargs.get('table_name', None) + + +class ZohoSource(TabularSource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ZohoSource, self).__init__(**kwargs) + self.type = 'ZohoSource' # type: str + self.query = kwargs.get('query', None) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py index 486baee4e7de..429ae1a8680d 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py @@ -19,7 +19,7 @@ class Activity(msrest.serialization.Model): """A pipeline activity. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SwitchActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. + sub-classes are: AppendVariableActivity, ControlActivity, ExecutePipelineActivity, ExecutionActivity, FilterActivity, ForEachActivity, IfConditionActivity, SetVariableActivity, SynapseSparkJobDefinitionActivity, SqlPoolStoredProcedureActivity, SwitchActivity, SynapseNotebookActivity, UntilActivity, ValidationActivity, WaitActivity, WebHookActivity. All required parameters must be populated in order to send to Azure. 
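# --- Editor's example (not part of the generated diff) ---------------------------
# An illustrative sketch of what the expanded sub-class registrations above enable:
# msrest serializes the 'type' discriminator set by each Activity subclass and, when
# deserializing through the base class, dispatches back to the registered subclass.
# The activity and notebook names are hypothetical.
from azure.synapse.artifacts.models import (
    Activity,
    SynapseNotebookActivity,
    SynapseNotebookReference,
)

activity = SynapseNotebookActivity(
    name='run-notebook',  # hypothetical activity name
    notebook=SynapseNotebookReference(
        type='NotebookReference',
        reference_name='my-notebook',  # hypothetical notebook name
    ),
)
payload = activity.serialize()            # payload carries type='SynapseNotebook'
restored = Activity.deserialize(payload)  # dispatched via the _subtype_map below
assert isinstance(restored, SynapseNotebookActivity)
# ----------------------------------------------------------------------------------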
@@ -53,7 +53,7 @@ class Activity(msrest.serialization.Model): } _subtype_map = { - 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'Switch': 'SwitchActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} + 'type': {'AppendVariable': 'AppendVariableActivity', 'Container': 'ControlActivity', 'ExecutePipeline': 'ExecutePipelineActivity', 'Execution': 'ExecutionActivity', 'Filter': 'FilterActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'SetVariable': 'SetVariableActivity', 'SparkJob': 'SynapseSparkJobDefinitionActivity', 'SqlPoolStoredProcedure': 'SqlPoolStoredProcedureActivity', 'Switch': 'SwitchActivity', 'SynapseNotebook': 'SynapseNotebookActivity', 'Until': 'UntilActivity', 'Validation': 'ValidationActivity', 'Wait': 'WaitActivity', 'WebHook': 'WebHookActivity'} } def __init__( @@ -69,7 +69,7 @@ def __init__( super(Activity, self).__init__(**kwargs) self.additional_properties = additional_properties self.name = name - self.type: str = 'Activity' + self.type = 'Activity' # type: str self.description = description self.depends_on = depends_on self.user_properties = user_properties @@ -369,7 +369,7 @@ def __init__( ): super(LinkedService, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'LinkedService' + self.type = 'LinkedService' # type: str self.connect_via = connect_via self.description = description self.parameters = parameters @@ -473,7 +473,7 @@ def __init__( **kwargs ): super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonMWS' + self.type = 'AmazonMWS' # type: str self.endpoint = endpoint self.marketplace_id = marketplace_id self.seller_id = seller_id @@ -490,7 +490,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
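# --- Editor's example (not part of the generated diff) ---------------------------
# A small sketch of how the dotted _attribute_map keys used throughout these Dataset
# sub-classes flatten Python attributes into the nested wire format: a key such as
# 'typeProperties.database' places the value under the payload's 'typeProperties'
# object on serialization. The reference and table names are hypothetical.
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    TeradataTableDataset,
)

dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(
        type='LinkedServiceReference',
        reference_name='my-teradata-ls',  # hypothetical linked service
    ),
    database='sales',  # hypothetical
    table='orders',    # hypothetical
)
body = dataset.serialize()
# Expected shape, roughly:
# {'type': 'TeradataTable',
#  'linkedServiceName': {'type': 'LinkedServiceReference', 'referenceName': 'my-teradata-ls'},
#  'typeProperties': {'database': 'sales', 'table': 'orders'}}
# ----------------------------------------------------------------------------------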
@@ -536,7 +536,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 
'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -554,7 +554,7 @@ def __init__( ): super(Dataset, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type: str = 'Dataset' + self.type = 'Dataset' # type: str self.description = description self.structure = structure self.schema = schema @@ -628,10 +628,183 @@ def __init__( **kwargs ): super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AmazonMWSObject' + self.type = 'AmazonMWSObject' # type: str self.table_name = table_name +class CopySource(msrest.serialization.Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, TabularSource, WebSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
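Note: the recurring change from "self.type: str = '...'" to "self.type = '...'  # type: str" in this hunk and throughout the rest of the diff swaps a PEP 526 variable annotation for a type comment. Annotated assignments are a syntax error on Python 2.7, while type comments are understood by type checkers on both Python 2 and 3, so this keeps the regenerated models importable everywhere. Illustrative pattern only, not part of the diff:

    # Before (previous generator output; requires Python 3.6+):
    self.type: str = 'Dataset'
    # After (this diff; also parses on Python 2.7):
    self.type = 'Dataset'  # type: str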
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + **kwargs + ): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'CopySource' # type: str + self.source_retry_count = source_retry_count + self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections + + +class TabularSource(CopySource): + """Copy activity sources of tabular type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonMWSSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAXSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMISource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonMWSSource': 'AmazonMWSSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAXSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + 
source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + **kwargs + ): + super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'TabularSource' # type: str + self.query_timeout = query_timeout + + +class AmazonMWSSource(TabularSource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AmazonMWSSource' # type: str + self.query = query + + class AmazonRedshiftLinkedService(LinkedService): """Linked service for Amazon Redshift. 
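Note: the three classes above establish the new polymorphic copy-source chain, CopySource -> TabularSource -> AmazonMWSSource. Each subclass pins the 'type' discriminator that msrest uses, together with _subtype_map, to pick the right class during deserialization. A minimal construction sketch, assuming the azure.synapse.artifacts.models namespace that these docstrings reference:

    from azure.synapse.artifacts.models import AmazonMWSSource

    # Keyword-only construction, matching the generated __init__ above.
    source = AmazonMWSSource(
        query="SELECT * FROM Orders",  # static value or Expression
        query_timeout="02:00:00",      # inherited from TabularSource
        source_retry_count=3,          # inherited from CopySource
        max_concurrent_connections=4,
    )
    assert source.type == "AmazonMWSSource"  # constant filled by the subclass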
@@ -708,7 +881,7 @@ def __init__( **kwargs ): super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonRedshift' + self.type = 'AmazonRedshift' # type: str self.server = server self.username = username self.password = password @@ -717,6 +890,69 @@ def __init__( self.encrypted_credential = encrypted_credential +class AmazonRedshiftSource(TabularSource): + """A copy activity source for Amazon Redshift Source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when + copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be + unloaded into S3 first and then copied into the targeted sink from the interim S3. 
+ :type redshift_unload_settings: ~azure.synapse.artifacts.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, + **kwargs + ): + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AmazonRedshiftSource' # type: str + self.query = query + self.redshift_unload_settings = redshift_unload_settings + + class AmazonRedshiftTableDataset(Dataset): """The Amazon Redshift table dataset. @@ -792,7 +1028,7 @@ def __init__( **kwargs ): super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AmazonRedshiftTable' + self.type = 'AmazonRedshiftTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema @@ -864,953 +1100,791 @@ def __init__( **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AmazonS3' + self.type = 'AmazonS3' # type: str self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url self.encrypted_credential = encrypted_credential -class AppendVariableActivity(Activity): - """Append value for a Variable of type Array. +class DatasetLocation(msrest.serialization.Model): + """Dataset location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3Location, AzureBlobFSLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, SftpLocation. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. 
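Note: AmazonRedshiftSource (completed above) extends TabularSource with an optional staged-unload path: when redshift_unload_settings is supplied, data is UNLOADed to an interim S3 bucket before being copied to the sink. A minimal sketch of the direct-query form, with the unload settings omitted; query text is hypothetical:

    from azure.synapse.artifacts.models import AmazonRedshiftSource

    redshift_source = AmazonRedshiftSource(
        query="select * from public.events",  # Expression or static string
        query_timeout="01:00:00",             # timespan-pattern string
    )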
:type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be appended to. - :type variable_name: str - :param value: Value to be appended. Could be a static value or Expression. - :type value: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3Location': 'AmazonS3Location', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation'} } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - variable_name: Optional[str] = None, - value: Optional[object] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, **kwargs ): - super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'AppendVariable' - self.variable_name = variable_name - self.value = value + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'DatasetLocation' # type: str + self.folder_path = folder_path + self.file_name = file_name -class AvroDataset(Dataset): - """Avro dataset. +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the avro storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec - :param avro_compression_level: - :type avro_compression_level: int + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or Expression with + resultType string). + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or Expression with resultType + string). 
+ :type version: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, - 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, - avro_compression_level: Optional[int] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + bucket_name: Optional[object] = None, + version: Optional[object] = None, **kwargs ): - super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Avro' - self.location = location - self.avro_compression_codec = avro_compression_codec - self.avro_compression_level = avro_compression_level + super(AmazonS3Location, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AmazonS3Location' # type: str + self.bucket_name = bucket_name + self.version = version -class AzureBatchLinkedService(LinkedService): - """Azure Batch linked service. +class StoreReadSettings(msrest.serialization.Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonS3ReadSettings, AzureBlobFSReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, HttpReadSettings, SftpReadSettings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. 
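Note: AmazonS3Location (above) is the first concrete DatasetLocation subtype in this diff; it addresses an S3 object by bucket, folder, file, and optional object version. A minimal sketch under the same namespace assumption as the earlier examples, with hypothetical values:

    from azure.synapse.artifacts.models import AmazonS3Location

    location = AmazonS3Location(
        bucket_name="my-landing-bucket",
        folder_path="raw/2020/09",
        file_name="events.avro",
    )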
:type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Batch account name. Type: string (or Expression with - resultType string). - :type account_name: object - :param access_key: The Azure Batch account access key. - :type access_key: ~azure.synapse.artifacts.models.SecretBase - :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType - string). - :type batch_uri: object - :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with - resultType string). - :type pool_name: object - :param linked_service_name: Required. The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'batch_uri': {'required': True}, - 'pool_name': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, - 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, - 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings'} } def __init__( self, *, - account_name: object, - batch_uri: object, - pool_name: object, - linked_service_name: 
"LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - access_key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, **kwargs ): - super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBatch' - self.account_name = account_name - self.access_key = access_key - self.batch_uri = batch_uri - self.pool_name = pool_name - self.linked_service_name = linked_service_name - self.encrypted_credential = encrypted_credential + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'StoreReadSettings' # type: str + self.max_concurrent_connections = max_concurrent_connections -class AzureBlobFSLinkedService(LinkedService): - """Azure Data Lake Storage Gen2 linked service. +class AmazonS3ReadSettings(StoreReadSettings): + """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type url: object - :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or - Expression with resultType string). - :type account_key: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Storage Gen2 account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - account_key: Optional[object] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + 
modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBlobFS' - self.url = url - self.account_key = account_key - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AmazonS3ReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureBlobStorageLinkedService(LinkedService): - """The azure blob storage linked service. +class AppendVariableActivity(Activity): + """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri, - serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with - connectionString, serviceEndpoint property. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is - mutually exclusive with connectionString, sasUri property. - :type service_endpoint: str - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. 
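Note: AmazonS3ReadSettings (completed above) carries the listing options a copy activity applies on top of a location: recursion, wildcards or a prefix filter, partition discovery, and a modified-datetime window. A sketch of a typical wildcard configuration, values hypothetical:

    from azure.synapse.artifacts.models import AmazonS3ReadSettings

    read_settings = AmazonS3ReadSettings(
        recursive=True,
        wildcard_folder_path="raw/2020/*",
        wildcard_file_name="*.avro",
        enable_partition_discovery=False,  # prefix= is the alternative for large buckets
    )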
- :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param variable_name: Name of the variable whose value needs to be appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression. + :type value: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__( self, *, + name: str, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, - sas_token: Optional["AzureKeyVaultSecretReference"] = None, - service_endpoint: Optional[str] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + variable_name: Optional[str] = None, + value: Optional[object] = None, **kwargs ): - super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, 
connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureBlobStorage' - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.service_endpoint = service_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'AppendVariable' # type: str + self.variable_name = variable_name + self.value = value -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AutoPauseProperties(msrest.serialization.Model): + """Auto-pausing properties of a Big Data pool powered by Apache Spark. + + :param delay_in_minutes: Number of minutes of idle time before the Big Data pool is + automatically paused. + :type delay_in_minutes: int + :param enabled: Whether auto-pausing is enabled for the Big Data pool. + :type enabled: bool + """ + + _attribute_map = { + 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + delay_in_minutes: Optional[int] = None, + enabled: Optional[bool] = None, + **kwargs + ): + super(AutoPauseProperties, self).__init__(**kwargs) + self.delay_in_minutes = delay_in_minutes + self.enabled = enabled + + +class AutoScaleProperties(msrest.serialization.Model): + """Auto-scaling properties of a Big Data pool powered by Apache Spark. + + :param min_node_count: The minimum number of nodes the Big Data pool can support. + :type min_node_count: int + :param enabled: Whether automatic scaling is enabled for the Big Data pool. + :type enabled: bool + :param max_node_count: The maximum number of nodes the Big Data pool can support. + :type max_node_count: int + """ + + _attribute_map = { + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + } + + def __init__( + self, + *, + min_node_count: Optional[int] = None, + enabled: Optional[bool] = None, + max_node_count: Optional[int] = None, + **kwargs + ): + super(AutoScaleProperties, self).__init__(**kwargs) + self.min_node_count = min_node_count + self.enabled = enabled + self.max_node_count = max_node_count + + +class AvroDataset(Dataset): + """Avro dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
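Note: AutoPauseProperties and AutoScaleProperties (both above) are plain configuration bags for Spark Big Data pools rather than discriminated subtypes, so they have no _subtype_map or required discriminator. A sketch pausing an idle pool after 15 minutes and scaling between 3 and 10 nodes, values hypothetical:

    from azure.synapse.artifacts.models import AutoPauseProperties, AutoScaleProperties

    auto_pause = AutoPauseProperties(enabled=True, delay_in_minutes=15)
    auto_scale = AutoScaleProperties(enabled=True, min_node_count=3, max_node_count=10)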
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType - string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). 
- :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the avro storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", + "bzip2". + :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec + :param avro_compression_level: + :type avro_compression_level: int """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, + 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 
'typeProperties.location', 'type': 'DatasetLocation'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } def __init__( self, *, - domain: object, - access_token: "SecretBase", + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - existing_cluster_id: Optional[object] = None, - instance_pool_id: Optional[object] = None, - new_cluster_version: Optional[object] = None, - new_cluster_num_of_worker: Optional[object] = None, - new_cluster_node_type: Optional[object] = None, - new_cluster_spark_conf: Optional[Dict[str, object]] = None, - new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, - new_cluster_custom_tags: Optional[Dict[str, object]] = None, - new_cluster_driver_node_type: Optional[object] = None, - new_cluster_init_scripts: Optional[object] = None, - new_cluster_enable_elastic_disk: Optional[object] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, + avro_compression_level: Optional[int] = None, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDatabricks' - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.instance_pool_id = instance_pool_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential + super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Avro' # type: str + self.location = location + self.avro_compression_codec = avro_compression_codec + self.avro_compression_level = avro_compression_level -class ExecutionActivity(Activity): - """Base class for all execution activities. +class DatasetStorageFormat(msrest.serialization.Model): + """The format definition of a storage. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. + sub-classes are: AvroFormat, JsonFormat, OrcFormat, ParquetFormat, TextFormat. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'OrcFormat': 'OrcFormat', 'ParquetFormat': 'ParquetFormat', 'TextFormat': 'TextFormat'} } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, **kwargs ): - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Execution' - self.linked_service_name = linked_service_name - self.policy = policy + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'DatasetStorageFormat' # type: str + self.serializer = serializer + self.deserializer = deserializer -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. 
Type of dataset storage format.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, *, - name: str, - command: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - command_timeout: Optional[object] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureDataExplorerCommand' - self.command = command - self.command_timeout = command_timeout + super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'AvroFormat' # type: str -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class CopySink(msrest.serialization.Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( self, *, - endpoint: object, - service_principal_id: object, - service_principal_key: "SecretBase", - database: object, - tenant: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, 
description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataExplorer' - self.endpoint = endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.database = database - self.tenant = tenant + super(CopySink, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'CopySink' # type: str + self.write_batch_size = write_batch_size + self.write_batch_timeout = write_batch_timeout + self.sink_retry_count = sink_retry_count + self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections -class AzureDataExplorerTableDataset(Dataset): - """The Azure Data Explorer (Kusto) dataset. +class AvroSink(CopySink): + """A copy activity Avro sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table: The table name of the Azure Data Explorer database. Type: string (or Expression - with resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Avro format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.AvroWriteSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["AvroWriteSettings"] = None, **kwargs ): - super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureDataExplorerTable' - self.table = table + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AvroSink' # type: str + self.store_settings = store_settings + self.format_settings = format_settings -class AzureDataLakeAnalyticsLinkedService(LinkedService): - """Azure Data Lake Analytics linked service. +class AvroSource(CopySource): + """A copy activity Avro source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or - Expression with resultType string). - :type account_name: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Analytics account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Analytics account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object - :param subscription_id: Data Lake Analytics account subscription ID (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Analytics account resource group name (if different from - Data Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object - :param data_lake_analytics_uri: Azure Data Lake Analytics URI Type: string (or Expression with - resultType string). - :type data_lake_analytics_uri: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Avro store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, *, - account_name: object, - tenant: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - subscription_id: Optional[object] = None, - resource_group_name: Optional[object] = None, - data_lake_analytics_uri: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataLakeAnalytics' - self.account_name = account_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.data_lake_analytics_uri = data_lake_analytics_uri - self.encrypted_credential = encrypted_credential + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AvroSource' # type: str + self.store_settings = store_settings -class AzureDataLakeStoreLinkedService(LinkedService): - """Azure Data Lake Store linked service. +class FormatWriteSettings(msrest.serialization.Model): + """Format write settings. 
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(FormatWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'FormatWriteSettings' # type: str + + +class AvroWriteSettings(FormatWriteSettings): + """Avro write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param record_name: Top level record name in write result, which is required in AVRO spec. + :type record_name: str + :param record_namespace: Record namespace in the write result. + :type record_namespace: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'record_name': {'key': 'recordName', 'type': 'str'}, + 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + record_name: Optional[str] = None, + record_namespace: Optional[str] = None, + **kwargs + ): + super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AvroWriteSettings' # type: str + self.record_name = record_name + self.record_namespace = record_namespace + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. All required parameters must be populated in order to send to Azure. @@ -1827,27 +1901,19 @@ class AzureDataLakeStoreLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression - with resultType string). - :type data_lake_store_uri: object - :param service_principal_id: The ID of the application used to authenticate against the Azure - Data Lake Store account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The Key of the application used to authenticate against the Azure - Data Lake Store account. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. 
Type: - string (or Expression with resultType string). - :type tenant: object - :param account_name: Data Lake Store account name. Type: string (or Expression with resultType - string). + :param account_name: Required. The Azure Batch account name. Type: string (or Expression with + resultType string). :type account_name: object - :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory - account). Type: string (or Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Data Lake Store account resource group name (if different from Data - Factory account). Type: string (or Expression with resultType string). - :type resource_group_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.synapse.artifacts.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType + string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with + resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1856,7 +1922,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'data_lake_store_uri': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { @@ -1866,48 +1935,42 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - data_lake_store_uri: object, + account_name: object, + batch_uri: object, + pool_name: object, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - service_principal_id: 
Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - account_name: Optional[object] = None, - subscription_id: Optional[object] = None, - resource_group_name: Optional[object] = None, + access_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureDataLakeStore' - self.data_lake_store_uri = data_lake_store_uri - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant + super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureBatch' # type: str self.account_name = account_name - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name + self.access_key = access_key + self.batch_uri = batch_uri + self.pool_name = pool_name + self.linked_service_name = linked_service_name self.encrypted_credential = encrypted_credential -class AzureFileStorageLinkedService(LinkedService): - """Azure File Storage linked service. +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. All required parameters must be populated in order to send to Azure. @@ -1924,14 +1987,21 @@ class AzureFileStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or + Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
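As a usage sketch for the regenerated Avro and Azure Data Lake Storage Gen2 models in this file (illustrative only, not part of the generated diff): the snippet below wires together a linked service, a copy-activity source, and a copy-activity sink using only the keyword arguments shown above. The endpoint, IDs, and secret are placeholder values, and SecureString is the SecretBase subtype referenced in this file's subtype map; its constructor signature is an assumption noted in the comments.

# Illustrative sketch, not part of the generated code. Model names and keyword
# arguments come from the regenerated classes in this diff; every concrete
# value is a placeholder.
from azure.synapse.artifacts.models import (
    AvroSink,
    AvroSource,
    AvroWriteSettings,
    AzureBlobFSLinkedService,
    AzureBlobFSReadSettings,
    SecureString,
)

# ADLS Gen2 linked service with service-principal auth; 'url' is the only
# required type property per the _validation map above.
# Assumption: SecureString carries the secret in a required 'value' field.
adls_gen2 = AzureBlobFSLinkedService(
    url="https://myaccount.dfs.core.windows.net",
    service_principal_id="<app-client-id>",
    service_principal_key=SecureString(value="<app-client-secret>"),
    tenant="<tenant-id>",
)

# Copy-activity source: read *.avro files recursively from the store.
avro_source = AvroSource(
    store_settings=AzureBlobFSReadSettings(
        recursive=True,
        wildcard_file_name="*.avro",
    ),
)

# Copy-activity sink: write Avro with an explicit record name and namespace
# via the AvroWriteSettings defined earlier in this file.
avro_sink = AvroSink(
    format_settings=AvroWriteSettings(
        record_name="MyRecord",
        record_namespace="com.example",
    ),
)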
@@ -1940,7 +2010,7 @@ class AzureFileStorageLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -1950,247 +2020,301 @@ class AzureFileStorageLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - host: object, + url: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - user_id: Optional[object] = None, - password: Optional["SecretBase"] = None, + account_key: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureFileStorage' - self.host = host - self.user_id = user_id - self.password = password + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureBlobFS' # type: str + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant self.encrypted_credential = encrypted_credential -class AzureFunctionActivity(ExecutionActivity): - """Azure Function activity. +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param method: Required. Rest API method for target endpoint. Possible values include: "GET", - "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". - :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod - :param function_name: Required. Name of the Function that the Azure Function Activity will - call. Type: string (or Expression with resultType string). - :type function_name: object - :param headers: Represents the headers that will be sent to the request. For example, to set - the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": - "application/json" }. Type: string (or Expression with resultType string). - :type headers: object - :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT - method, not allowed for GET method Type: string (or Expression with resultType string). - :type body: object - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'method': {'required': True}, - 'function_name': {'required': True}, + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string (or Expression with + resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'method': {'key': 'typeProperties.method', 'type': 'str'}, - 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, - 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, - 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, } def __init__( self, *, - name: str, - method: Union[str, "AzureFunctionActivityMethod"], - function_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - headers: Optional[object] = None, - body: Optional[object] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + file_system: Optional[object] = None, **kwargs ): - super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, 
**kwargs) - self.type: str = 'AzureFunctionActivity' - self.method = method - self.function_name = function_name - self.headers = headers - self.body = body + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureBlobFSLocation' # type: str + self.file_system = file_system -class AzureFunctionLinkedService(LinkedService): - """Azure Function linked service. +class AzureBlobFSReadSettings(StoreReadSettings): + """Azure blobFS read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the - format https://:code:``.azurewebsites.net. - :type function_app_url: object - :param function_key: Function or Host key for Azure Function App. - :type function_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'function_app_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, - 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - function_app_url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - function_key: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureFunction' - self.function_app_url = function_app_url - self.function_key = function_key - self.encrypted_credential = encrypted_credential + super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobFSReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureKeyVaultLinkedService(LinkedService): - """Azure Key Vault linked service. +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. 
:type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param base_url: Required. The base URL of the Azure Key Vault. e.g. - https://myakv.vault.azure.net Type: string (or Expression with resultType string). - :type base_url: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'base_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - base_url: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureKeyVault' - self.base_url = base_url + super(AzureBlobFSSink, 
self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobFSSink' # type: str + self.copy_behavior = copy_behavior -class SecretBase(msrest.serialization.Model): - """The base definition of a secret type. +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + treat_empty_as_null: Optional[object] = None, + skip_header_line_count: Optional[object] = None, + recursive: Optional[object] = None, + **kwargs + ): + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobFSSource' # type: str + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + + +class StoreWriteSettings(msrest.serialization.Model): + """Connector write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureKeyVaultSecretReference, SecureString. 
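
As a usage sketch for the AzureBlobFSSource model defined above (not part of the generated diff; every literal below is a placeholder, since these fields accept either raw values or Expression objects):

    from azure.synapse.artifacts.models import AzureBlobFSSource

    # Placeholder values; each object-typed field also accepts an Expression.
    source = AzureBlobFSSource(
        recursive=True,            # walk the folder tree, not just the top level
        treat_empty_as_null=True,
        skip_header_line_count=1,  # drop one header line per blob
        max_concurrent_connections=4,
    )
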
+ sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -2198,68 +2322,79 @@ class SecretBase(msrest.serialization.Model): } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } _subtype_map = { - 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(SecretBase, self).__init__(**kwargs) - self.type: Optional[str] = None + super(StoreWriteSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'StoreWriteSettings' # type: str + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior -class AzureKeyVaultSecretReference(SecretBase): - """Azure Key Vault secret reference. +class AzureBlobFSWriteSettings(StoreWriteSettings): + """Azure blobFS write settings. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of the secret.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param store: Required. The Azure Key Vault linked service reference. - :type store: ~azure.synapse.artifacts.models.LinkedServiceReference - :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or - Expression with resultType string). - :type secret_name: object - :param secret_version: The version of the secret in Azure Key Vault. The default value is the - latest version of the secret. Type: string (or Expression with resultType string). - :type secret_version: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). + :type block_size_in_mb: object """ _validation = { 'type': {'required': True}, - 'store': {'required': True}, - 'secret_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, - 'secret_name': {'key': 'secretName', 'type': 'object'}, - 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, *, - store: "LinkedServiceReference", - secret_name: object, - secret_version: Optional[object] = None, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureKeyVaultSecretReference, self).__init__(**kwargs) - self.type: str = 'AzureKeyVaultSecret' - self.store = store - self.secret_name = secret_name - self.secret_version = secret_version + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureBlobFSWriteSettings' # type: str + self.block_size_in_mb = block_size_in_mb -class AzureMariaDBLinkedService(LinkedService): - """Azure Database for MariaDB linked service. +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. All required parameters must be populated in order to send to Azure. @@ -2276,15 +2411,33 @@ class AzureMariaDBLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. + :param connection_string: The connection string. It is mutually exclusive with sasUri, + serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually exclusive with + connectionString, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is + mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Data Warehouse. 
Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type encrypted_credential: str """ _validation = { @@ -2299,8 +2452,14 @@ class AzureMariaDBLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( @@ -2312,260 +2471,201 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + account_key: Optional["AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[object] = None, + sas_token: Optional["AzureKeyVaultSecretReference"] = None, + service_endpoint: Optional[str] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[str] = None, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMariaDB' + super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureBlobStorage' # type: str self.connection_string = connection_string - self.pwd = pwd + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.service_endpoint = service_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant self.encrypted_credential = encrypted_credential -class AzureMariaDBTableDataset(Dataset): - """Azure Database for MariaDB dataset. 
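
A construction sketch for the regenerated AzureBlobStorageLinkedService (illustrative, not part of the diff): the sasUri option is shown because it is mutually exclusive with connectionString and serviceEndpoint, the Key Vault names are invented, and the snippet assumes the regenerated AzureKeyVaultSecretReference and LinkedServiceReference keep their generated signatures:

    from azure.synapse.artifacts.models import (
        AzureBlobStorageLinkedService,
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
    )

    # "MyKeyVault" is a placeholder linked service name; the secret holds the SAS token.
    sas_token = AzureKeyVaultSecretReference(
        store=LinkedServiceReference(type="LinkedServiceReference", reference_name="MyKeyVault"),
        secret_name="blob-sas-token",
    )
    blob_ls = AzureBlobStorageLinkedService(
        sas_uri="https://myaccount.blob.core.windows.net/?sv=2019-12-12",  # placeholder SAS URI
        sas_token=sas_token,
    )
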
+class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or Expression with + resultType string). 
+ :type container: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + container: Optional[object] = None, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureMariaDBTable' - self.table_name = table_name + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureBlobStorageLocation' # type: str + self.container = container -class AzureMLBatchExecutionActivity(ExecutionActivity): - """Azure ML Batch Execution activity. +class AzureBlobStorageReadSettings(StoreReadSettings): + """Azure blob read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service - endpoint. Keys must match the names of web service parameters defined in the published Azure ML - web service. Values will be passed in the GlobalParameters property of the Azure ML batch - execution request. 
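
A short sketch of the AzureBlobStorageLocation model completed above (illustrative only; container and paths are invented):

    from azure.synapse.artifacts.models import AzureBlobStorageLocation

    # folderPath and fileName accept strings or Expressions; plain strings here.
    location = AzureBlobStorageLocation(
        container="raw",            # placeholder container
        folder_path="events/2020",  # placeholder folder
        file_name="data.csv",       # placeholder file
    )
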
- :type global_parameters: dict[str, object] - :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This - information will be passed in the WebServiceOutputs property of the Azure ML batch execution - request. - :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] - :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web - Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This - information will be passed in the WebServiceInputs property of the Azure ML batch execution - request. - :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with + resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, - 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, - 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - global_parameters: Optional[Dict[str, object]] = None, - web_service_outputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, - web_service_inputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLBatchExecution' - self.global_parameters = global_parameters - self.web_service_outputs = web_service_outputs - self.web_service_inputs = web_service_inputs + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureBlobStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureMLExecutePipelineActivity(ExecutionActivity): - """Azure ML Execute Pipeline activity. 
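
The read settings above can express a wildcard scan; a hedged sketch with placeholder patterns:

    from azure.synapse.artifacts.models import AzureBlobStorageReadSettings

    read_settings = AzureBlobStorageReadSettings(
        recursive=True,
        wildcard_folder_path="events/*",   # placeholder wildcard
        wildcard_file_name="*.csv",        # placeholder wildcard
        enable_partition_discovery=False,  # plain bool, not an Expression
    )
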
+class AzureBlobStorageWriteSettings(StoreWriteSettings): + """Azure blob write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or - Expression with resultType string). - :type ml_pipeline_id: object - :param experiment_name: Run history experiment name of the pipeline run. This information will - be passed in the ExperimentName property of the published pipeline execution request. Type: - string (or Expression with resultType string). - :type experiment_name: object - :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline - endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. - Values will be passed in the ParameterAssignments property of the published pipeline execution - request. Type: object with key value pairs (or Expression with resultType object). - :type ml_pipeline_parameters: object - :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be - passed in the ParentRunId property of the published pipeline execution request. Type: string - (or Expression with resultType string). - :type ml_parent_run_id: object - :param continue_on_step_failure: Whether to continue execution of other steps in the - PipelineRun if a step fails. This information will be passed in the continueOnStepFailure - property of the published pipeline execution request. Type: boolean (or Expression with - resultType boolean). - :type continue_on_step_failure: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer + (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, - 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, - 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, - 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, - 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } def __init__( self, *, - name: str, - ml_pipeline_id: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - experiment_name: Optional[object] = None, - ml_pipeline_parameters: Optional[object] = None, - ml_parent_run_id: Optional[object] = None, - continue_on_step_failure: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLExecutePipeline' - self.ml_pipeline_id = ml_pipeline_id - self.experiment_name = experiment_name - self.ml_pipeline_parameters = ml_pipeline_parameters - self.ml_parent_run_id = ml_parent_run_id - self.continue_on_step_failure = continue_on_step_failure + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureBlobStorageWriteSettings' # type: str + self.block_size_in_mb = block_size_in_mb -class AzureMLLinkedService(LinkedService): - """Azure ML Studio Web Service linked service. +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. All required parameters must be populated in order to send to Azure. @@ -2582,24 +2682,55 @@ class AzureMLLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service - endpoint. Type: string (or Expression with resultType string). 
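
And the matching write settings (illustrative; copyBehavior values are service-defined strings, so "PreserveHierarchy" is only an assumed example):

    from azure.synapse.artifacts.models import AzureBlobStorageWriteSettings

    write_settings = AzureBlobStorageWriteSettings(
        max_concurrent_connections=8,       # placeholder tuning value
        copy_behavior="PreserveHierarchy",  # assumed copy-behavior name
        block_size_in_mb=16,                # placeholder block size
    )
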
- :type ml_endpoint: object - :param api_key: Required. The API key for accessing the Azure ML model endpoint. - :type api_key: ~azure.synapse.artifacts.models.SecretBase - :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web - Service endpoint. Type: string (or Expression with resultType string). - :type update_resource_endpoint: object - :param service_principal_id: The ID of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + :param domain: Required. :code:`<REGION>`.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - ARM-based updateResourceEndpoint of an Azure ML Studio web service. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this is a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources.
This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2608,8 +2739,8 @@ class AzureMLLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'ml_endpoint': {'required': True}, - 'api_key': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, } _attribute_map = { @@ -2619,139 +2750,131 @@ class AzureMLLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, - 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, - 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - ml_endpoint: object, - api_key: "SecretBase", + domain: object, + access_token: "SecretBase", additional_properties: Optional[Dict[str, 
object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - update_resource_endpoint: Optional[object] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, + existing_cluster_id: Optional[object] = None, + instance_pool_id: Optional[object] = None, + new_cluster_version: Optional[object] = None, + new_cluster_num_of_worker: Optional[object] = None, + new_cluster_node_type: Optional[object] = None, + new_cluster_spark_conf: Optional[Dict[str, object]] = None, + new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, + new_cluster_custom_tags: Optional[Dict[str, object]] = None, + new_cluster_driver_node_type: Optional[object] = None, + new_cluster_init_scripts: Optional[object] = None, + new_cluster_enable_elastic_disk: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureML' - self.ml_endpoint = ml_endpoint - self.api_key = api_key - self.update_resource_endpoint = update_resource_endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricks' # type: str + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.instance_pool_id = instance_pool_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential -class AzureMLServiceLinkedService(LinkedService): - """Azure ML Service linked service. +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. All required parameters must be populated in order to send to Azure. 
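
A hedged sketch of the AzureDatabricksLinkedService defined above, configured with a new job cluster; the domain, token, and cluster sizing are placeholders, and SecureString is assumed as the inline SecretBase variant:

    from azure.synapse.artifacts.models import AzureDatabricksLinkedService, SecureString

    databricks_ls = AzureDatabricksLinkedService(
        domain="adb-1234567890123456.7.azuredatabricks.net",  # placeholder workspace domain
        access_token=SecureString(value="dapi-placeholder"),  # never embed a real PAT
        new_cluster_version="6.4.x-scala2.11",                # placeholder Spark version
        new_cluster_num_of_worker="1:4",                      # autoscale from 1 (min) to 4 (max)
        new_cluster_node_type="Standard_DS3_v2",              # placeholder node type
    )
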
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param subscription_id: Required. Azure ML Service workspace subscription ID. Type: string (or - Expression with resultType string). - :type subscription_id: object - :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: - string (or Expression with resultType string). - :type resource_group_name: object - :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or - Expression with resultType string). - :type ml_workspace_name: object - :param service_principal_id: The ID of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType - string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against the - endpoint of a published Azure ML Service pipeline. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'subscription_id': {'required': True}, - 'resource_group_name': {'required': True}, - 'ml_workspace_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, - 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, - 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} } def __init__( self, *, - subscription_id: object, - resource_group_name: object, - ml_workspace_name: object, + name: str, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, **kwargs ): - 
super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMLService' - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.ml_workspace_name = ml_workspace_name - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = linked_service_name + self.policy = policy -class AzureMLUpdateResourceActivity(ExecutionActivity): - """Azure ML Update Resource management activity. +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. All required parameters must be populated in order to send to Azure. @@ -2772,24 +2895,18 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param trained_model_name: Required. Name of the Trained Model module in the Web Service - experiment to be updated. Type: string (or Expression with resultType string). - :type trained_model_name: object - :param trained_model_linked_service_name: Required. Name of Azure Storage linked service - holding the .ilearner file that will be uploaded by the update operation. - :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService - to represent the .ilearner file that will be uploaded by the update operation. Type: string - (or Expression with resultType string). - :type trained_model_file_path: object + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type command_timeout: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'trained_model_name': {'required': True}, - 'trained_model_linked_service_name': {'required': True}, - 'trained_model_file_path': {'required': True}, + 'command': {'required': True}, } _attribute_map = { @@ -2801,70 +2918,32 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, - 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, } def __init__( self, *, name: str, - trained_model_name: object, - trained_model_linked_service_name: "LinkedServiceReference", - trained_model_file_path: object, + command: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, + command_timeout: Optional[object] = None, **kwargs ): - super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'AzureMLUpdateResource' - self.trained_model_name = trained_model_name - self.trained_model_linked_service_name = trained_model_linked_service_name - self.trained_model_file_path = trained_model_file_path - - -class AzureMLWebServiceFile(msrest.serialization.Model): - """Azure ML WebService Input/Output file. - - All required parameters must be populated in order to send to Azure. - - :param file_path: Required. The relative file path, including container name, in the Azure Blob - Storage specified by the LinkedService. Type: string (or Expression with resultType string). - :type file_path: object - :param linked_service_name: Required. Reference to an Azure Storage LinkedService, where Azure - ML WebService Input/Output file located. 
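
An illustrative sketch of the command activity above; the command text and the referenced linked service name are invented, and the snippet assumes the generated LinkedServiceReference(type=..., reference_name=...) signature:

    from azure.synapse.artifacts.models import (
        AzureDataExplorerCommandActivity,
        LinkedServiceReference,
    )

    activity = AzureDataExplorerCommandActivity(
        name="RefreshSchema",                 # placeholder activity name
        command=".show database schema",      # placeholder control command
        command_timeout="00:20:00",           # matches the documented timespan pattern
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="MyKustoService",  # placeholder reference
        ),
    )
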
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - """ - - _validation = { - 'file_path': {'required': True}, - 'linked_service_name': {'required': True}, - } - - _attribute_map = { - 'file_path': {'key': 'filePath', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - } - - def __init__( - self, - *, - file_path: object, - linked_service_name: "LinkedServiceReference", - **kwargs - ): - super(AzureMLWebServiceFile, self).__init__(**kwargs) - self.file_path = file_path - self.linked_service_name = linked_service_name + super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = command + self.command_timeout = command_timeout -class AzureMySqlLinkedService(LinkedService): - """Azure MySQL database linked service. +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. All required parameters must be populated in order to send to Azure. @@ -2881,20 +2960,31 @@ class AzureMySqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:`<clusterName>`.:code:`<regionName>`.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string).
+ :type tenant: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, } _attribute_map = { @@ -2904,173 +2994,178 @@ class AzureMySqlLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, } def __init__( self, *, - connection_string: object, + endpoint: object, + service_principal_id: object, + service_principal_key: "SecretBase", + database: object, + tenant: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureMySql' - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant -class AzureMySqlTableDataset(Dataset): - """The Azure MySQL database dataset. +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. 
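
Unlike most linked services in this file, all five type properties here are required; a hedged construction sketch in which every identity value is a placeholder:

    from azure.synapse.artifacts.models import AzureDataExplorerLinkedService, SecureString

    kusto_ls = AzureDataExplorerLinkedService(
        endpoint="https://mycluster.westus2.kusto.windows.net",       # placeholder endpoint
        database="telemetry",                                         # placeholder database
        tenant="00000000-0000-0000-0000-000000000000",                # placeholder tenant ID
        service_principal_id="11111111-1111-1111-1111-111111111111",  # placeholder app ID
        service_principal_key=SecureString(value="sp-secret"),        # placeholder secret
    )
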
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Azure MySQL database table name. Type: string (or Expression with - resultType string). - :type table_name: object - :param table: The name of Azure MySQL database table. Type: string (or Expression with - resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the + target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description provided in a json + format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. Default is false. + Type: boolean. 
+ :type flush_immediately: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + ingestion_mapping_name: Optional[object] = None, + ingestion_mapping_as_json: Optional[object] = None, + flush_immediately: Optional[object] = None, **kwargs ): - super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureMySqlTable' - self.table_name = table_name - self.table = table + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataExplorerSink' # type: str + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately -class AzurePostgreSqlLinkedService(LinkedService): - """Azure PostgreSQL linked service. +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
 :type additional_properties: dict[str, object]
-    :param type: Required. Type of linked service.Constant filled by server.
+    :param type: Required. Copy source type.Constant filled by server.
     :type type: str
-    :param connect_via: The integration runtime reference.
-    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
-    :param description: Linked service description.
-    :type description: str
-    :param parameters: Parameters for linked service.
-    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
-    :param annotations: List of tags that can be used for describing the linked service.
-    :type annotations: list[object]
-    :param connection_string: An ODBC connection string. Type: string, SecureString or
-     AzureKeyVaultSecretReference.
-    :type connection_string: object
-    :param password: The Azure key vault secret reference of password in connection string.
-    :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
-    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
-     encrypted using the integration runtime credential manager. Type: string (or Expression with
-     resultType string).
-    :type encrypted_credential: object
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type:
+     string (or Expression with resultType string).
+    :type query: object
+    :param no_truncation: The name of the Boolean option that controls whether truncation is
+     applied to result-sets that go beyond a certain row-count limit.
+    :type no_truncation: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type query_timeout: object """ _validation = { 'type': {'required': True}, + 'query': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__( self, *, + query: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + no_truncation: Optional[object] = None, + query_timeout: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzurePostgreSql' - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataExplorerSource' # type: str + self.query = query + self.no_truncation = no_truncation + self.query_timeout = query_timeout -class AzurePostgreSqlTableDataset(Dataset): - """Azure PostgreSQL dataset. +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. All required parameters must be populated in order to send to Azure. @@ -3096,15 +3191,9 @@ class AzurePostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Azure PostgreSQL database which includes both schema - and table. Type: string (or Expression with resultType string). - :type table_name: object - :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with - resultType string). + :param table: The table name of the Azure Data Explorer database. Type: string (or Expression + with resultType string). 
:type table: object - :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -3122,9 +3211,7 @@ class AzurePostgreSqlTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( @@ -3138,90 +3225,16 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzurePostgreSqlTable' - self.table_name = table_name + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDataExplorerTable' # type: str self.table = table - self.schema_type_properties_schema = schema_type_properties_schema -class AzureSearchIndexDataset(Dataset): - """The Azure Search Index. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression - with resultType string). 
- :type index_name: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'index_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, - } - - def __init__( - self, - *, - linked_service_name: "LinkedServiceReference", - index_name: object, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - **kwargs - ): - super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSearchIndex' - self.index_name = index_name - - -class AzureSearchLinkedService(LinkedService): - """Linked service for Windows Azure Search Service. +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. All required parameters must be populated in order to send to Azure. @@ -3238,11 +3251,27 @@ class AzureSearchLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType - string). - :type url: object - :param key: Admin Key for Azure Search service. - :type key: ~azure.synapse.artifacts.models.SecretBase + :param account_name: Required. The Azure Data Lake Analytics account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Analytics account. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Analytics account. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + :param subscription_id: Data Lake Analytics account subscription ID (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Analytics account resource group name (if different from + Data Factory account). Type: string (or Expression with resultType string). 
+    :type resource_group_name: object
+    :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type: string (or Expression with
+     resultType string).
+    :type data_lake_analytics_uri: object
     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
      encrypted using the integration runtime credential manager. Type: string (or Expression with
      resultType string).
@@ -3251,7 +3280,8 @@

     _validation = {
         'type': {'required': True},
-        'url': {'required': True},
+        'account_name': {'required': True},
+        'tenant': {'required': True},
     }

     _attribute_map = {
@@ -3261,33 +3291,48 @@
         'description': {'key': 'description', 'type': 'str'},
         'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
         'annotations': {'key': 'annotations', 'type': '[object]'},
-        'url': {'key': 'typeProperties.url', 'type': 'object'},
-        'key': {'key': 'typeProperties.key', 'type': 'SecretBase'},
+        'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+        'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
+        'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
+        'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'},
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }

     def __init__(
         self,
         *,
-        url: object,
+        account_name: object,
+        tenant: object,
         additional_properties: Optional[Dict[str, object]] = None,
         connect_via: Optional["IntegrationRuntimeReference"] = None,
         description: Optional[str] = None,
         parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
         annotations: Optional[List[object]] = None,
-        key: Optional["SecretBase"] = None,
+        service_principal_id: Optional[object] = None,
+        service_principal_key: Optional["SecretBase"] = None,
+        subscription_id: Optional[object] = None,
+        resource_group_name: Optional[object] = None,
+        data_lake_analytics_uri: Optional[object] = None,
         encrypted_credential: Optional[object] = None,
         **kwargs
     ):
-        super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.type: str = 'AzureSearch'
-        self.url = url
-        self.key = key
+        super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.type = 'AzureDataLakeAnalytics'  # type: str
+        self.account_name = account_name
+        self.service_principal_id = service_principal_id
+        self.service_principal_key = service_principal_key
+        self.tenant = tenant
+        self.subscription_id = subscription_id
+        self.resource_group_name = resource_group_name
+        self.data_lake_analytics_uri = data_lake_analytics_uri
         self.encrypted_credential = encrypted_credential


-class AzureSqlDatabaseLinkedService(LinkedService):
-    """Microsoft Azure SQL Database linked service.
+class AzureDataLakeStoreLinkedService(LinkedService):
+    """Azure Data Lake Store linked service.

     All required parameters must be populated in order to send to Azure.
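# ----------------------------------------------------------------------------
# Editor's aside (not part of the diff): a minimal usage sketch for the
# generated linked-service models above. Required parameters are keyword-only,
# so leaving one out raises a TypeError at construction time, mirroring the
# _validation maps shown in these hunks. The account/tenant/secret values
# below are placeholders, and SecureString is assumed to be the concrete
# SecretBase model shipped with this package, as in other autorest-generated
# clients.
from azure.synapse.artifacts.models import (
    AzureDataLakeAnalyticsLinkedService,
    SecureString,
)

adla_ls = AzureDataLakeAnalyticsLinkedService(
    account_name="myadlaaccount",                       # required
    tenant="<tenant-guid-or-name>",                     # required
    service_principal_id="<app-id>",
    service_principal_key=SecureString(value="<key>"),  # any SecretBase subtype
    subscription_id="<subscription-id>",
)
# ----------------------------------------------------------------------------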
@@ -3304,20 +3349,27 @@ class AzureSqlDatabaseLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Database. Type: string (or Expression with resultType string). + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression + with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to authenticate against the Azure + Data Lake Store account. Type: string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Database. + :param service_principal_key: The Key of the application used to authenticate against the Azure + Data Lake Store account. :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or Expression with resultType + string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory + account). Type: string (or Expression with resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name (if different from Data + Factory account). Type: string (or Expression with resultType string). + :type resource_group_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
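# ----------------------------------------------------------------------------
# Editor's aside (not part of the diff): the dotted 'typeProperties.*' keys in
# the _attribute_map entries (next hunk) drive msrest's nesting, so the flat
# Python attributes serialize into the nested payload the service expects,
# while 'type' stays top-level as the polymorphic discriminator. A rough
# sketch using the standard msrest Model.serialize()/deserialize() helpers;
# treat the exact payload shown as illustrative:
from azure.synapse.artifacts.models import AzureDataLakeStoreLinkedService

adls_ls = AzureDataLakeStoreLinkedService(
    data_lake_store_uri="adl://myaccount.azuredatalakestore.net/webhdfs/v1",
)
wire = adls_ls.serialize()
# wire is roughly:
# {"type": "AzureDataLakeStore",
#  "typeProperties": {"dataLakeStoreUri": "adl://myaccount..."}}
restored = AzureDataLakeStoreLinkedService.deserialize(wire)
assert restored.data_lake_store_uri == adls_ls.data_lake_store_uri
# ----------------------------------------------------------------------------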
@@ -3326,7 +3378,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'data_lake_store_uri': {'required': True}, } _attribute_map = { @@ -3336,599 +3388,394 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - connection_string: object, + data_lake_store_uri: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + account_name: Optional[object] = None, + subscription_id: Optional[object] = None, + resource_group_name: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlDatabase' - self.connection_string = connection_string - self.password = password + super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDataLakeStore' # type: str + self.data_lake_store_uri = data_lake_store_uri self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.account_name = account_name + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name self.encrypted_credential = encrypted_credential -class AzureSqlDWLinkedService(LinkedService): - """Azure SQL Data Warehouse linked service. +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. 
Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Data Warehouse. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Data Warehouse. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
+ :type file_name: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, **kwargs ): - super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlDW' - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureDataLakeStoreLocation' # type: str -class AzureSqlDWTableDataset(Dataset): - """The Azure SQL Data Warehouse dataset. +class AzureDataLakeStoreReadSettings(StoreReadSettings): + """Azure data lake store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. 
Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). - :type table: object + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlDWTable' - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataLakeStoreReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class AzureSqlMILinkedService(LinkedService): - """Azure SQL Managed Instance linked service. +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param service_principal_id: The ID of the service principal used to authenticate against Azure - SQL Managed Instance. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key of the service principal used to authenticate against - Azure SQL Managed Instance. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The name or ID of the tenant to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
+ :type enable_adls_single_file_parallel: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - encrypted_credential: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + enable_adls_single_file_parallel: Optional[object] = None, **kwargs ): - super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureSqlMI' - self.connection_string = connection_string - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.encrypted_credential = encrypted_credential + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataLakeStoreSink' # type: str + self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel -class AzureSqlMITableDataset(Dataset): - """The Azure SQL Managed Instance dataset. 
+class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or - Expression with resultType string). - :type table: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). 
+ :type recursive: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, **kwargs ): - super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlMITable' - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDataLakeStoreSource' # type: str + self.recursive = recursive -class AzureSqlTableDataset(Dataset): - """The Azure SQL Server database dataset. +class AzureDataLakeStoreWriteSettings(StoreWriteSettings): + """Azure data lake store write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string - (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the Azure SQL database. Type: string (or Expression with - resultType string). - :type table: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureSqlTable' - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + 
super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' # type: str -class AzureStorageLinkedService(LinkedService): - """The storage account linked service. +class Resource(msrest.serialization.Model): + """Resource. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. - :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: str + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. 
+ :vartype type: str """ _validation = { - 'type': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, - *, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, - sas_token: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[str] = None, **kwargs ): - super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureStorage' - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token - self.encrypted_credential = encrypted_credential + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None -class AzureTableDataset(Dataset): - """The Azure Table storage dataset. +class AzureEntityResource(Resource): + """The resource model definition for an Azure Resource Manager resource with an etag. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset.
- :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. The table name of the Azure Table storage. Type: string (or - Expression with resultType string). - :type table_name: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, } def __init__( self, - *, - linked_service_name: "LinkedServiceReference", - table_name: object, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, **kwargs ): - super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'AzureTable' - self.table_name = table_name + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None -class AzureTableStorageLinkedService(LinkedService): - """The azure table storage linked service. +class AzureFileStorageLinkedService(LinkedService): + """Azure File Storage linked service. All required parameters must be populated in order to send to Azure. @@ -3945,24 +3792,23 @@ class AzureTableStorageLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: The connection string. It is mutually exclusive with sasUri property. - Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in connection string. 
- :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with - connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. - :type sas_uri: object - :param sas_token: The Azure key vault secret reference of sasToken in sas uri. - :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: str + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { @@ -3972,147 +3818,231 @@ class AzureTableStorageLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, - 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, - 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + host: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_key: Optional["AzureKeyVaultSecretReference"] = None, - sas_uri: Optional[object] = None, - sas_token: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[str] = None, + user_id: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'AzureTableStorage' - self.connection_string = connection_string - self.account_key = account_key - self.sas_uri = sas_uri - self.sas_token = sas_token + super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureFileStorage' # type: str + self.host = host + self.user_id = user_id + self.password = 
password self.encrypted_credential = encrypted_credential -class BigDataPoolReference(msrest.serialization.Model): - """Big data pool reference. - - Variables are only populated by the server, and will be ignored when sending a request. +class AzureFileStorageLocation(DatasetLocation): + """The location of file server dataset. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Big data pool reference type. Default value: "BigDataPoolReference". - :vartype type: str - :param reference_name: Required. Reference big data pool name. - :type reference_name: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } - type = "BigDataPoolReference" - def __init__( self, *, - reference_name: str, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, **kwargs ): - super(BigDataPoolReference, self).__init__(**kwargs) - self.reference_name = reference_name + super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureFileStorageLocation' # type: str -class BinaryDataset(Dataset): - """Binary dataset. +class AzureFileStorageReadSettings(StoreReadSettings): + """Azure File Storage read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the Binary storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression: The data compression method used for the binary dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression + with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureFileStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible values include: "GET", + "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE". + :type method: str or ~azure.synapse.artifacts.models.AzureFunctionActivityMethod + :param function_name: Required. Name of the Function that the Azure Function Activity will + call. Type: string (or Expression with resultType string). + :type function_name: object + :param headers: Represents the headers that will be sent to the request. For example, to set + the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": + "application/json" }. Type: string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT + method, not allowed for GET method Type: string (or Expression with resultType string). + :type body: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + name: str, + method: Union[str, "AzureFunctionActivityMethod"], + function_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = 
None, - location: Optional["DatasetLocation"] = None, - compression: Optional["DatasetCompression"] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + headers: Optional[object] = None, + body: Optional[object] = None, **kwargs ): - super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Binary' - self.location = location - self.compression = compression + super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureFunctionActivity' # type: str + self.method = method + self.function_name = function_name + self.headers = headers + self.body = body -class CassandraLinkedService(LinkedService): - """Linked service for Cassandra data source. +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. All required parameters must be populated in order to send to Azure. @@ -4129,20 +4059,11 @@ class CassandraLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name for connection. Type: string (or Expression with resultType - string). - :type host: object - :param authentication_type: AuthenticationType to be used for connection. Type: string (or - Expression with resultType string). - :type authentication_type: object - :param port: The port for the connection. Type: integer (or Expression with resultType - integer). - :type port: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). - :type username: object - :param password: Password for authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the + format https://:code:`<accountName>`.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
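# --- Usage sketch (editor's illustration, not part of the generated diff) ---
# A minimal example wiring together the AzureFunctionActivity completed above and
# the AzureFunctionLinkedService whose docstring precedes this hunk. The import
# path follows the ~azure.synapse.artifacts.models references in the docstrings;
# the SecureString constructor signature is assumed (SecureString is named as a
# SecretBase subtype later in this diff, but its __init__ is not shown here).
from azure.synapse.artifacts.models import (
    AzureFunctionActivity,
    AzureFunctionLinkedService,
    SecureString,
)

# function_app_url is the only required type property of the linked service.
function_ls = AzureFunctionLinkedService(
    function_app_url="https://myapp.azurewebsites.net",  # hypothetical URL
    function_key=SecureString(value="<function-key>"),   # assumed SecureString(value=...) signature
)

# name, method and function_name are required per the _validation map above;
# body is required for POST/PUT methods and not allowed for GET.
activity = AzureFunctionActivity(
    name="CallMyFunction",          # hypothetical activity name
    method="POST",                  # one of the documented AzureFunctionActivityMethod values
    function_name="ProcessOrders",  # hypothetical function name
    headers={"Accept-Language": "en-us", "Content-Type": "application/json"},
    body='{"orderId": 42}',
)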
@@ -4151,7 +4072,7 @@ class CassandraLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'function_app_url': {'required': True}, } _attribute_map = { @@ -4161,229 +4082,164 @@ class CassandraLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - host: object, + function_app_url: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - port: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, + function_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Cassandra' - self.host = host - self.authentication_type = authentication_type - self.port = port - self.username = username - self.password = password + super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureFunction' # type: str + self.function_app_url = function_app_url + self.function_key = function_key self.encrypted_credential = encrypted_credential -class CassandraTableDataset(Dataset): - """The Cassandra database dataset. +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. 
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name of the Cassandra database. Type: string (or Expression with - resultType string). - :type table_name: object - :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with - resultType string). - :type keyspace: object + :param base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net Type: string (or Expression with resultType string). + :type base_url: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'base_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + base_url: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - keyspace: Optional[object] = None, **kwargs ): - super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CassandraTable' - self.table_name = table_name - self.keyspace = keyspace + super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureKeyVault' # type: str + self.base_url = base_url -class CloudError(msrest.serialization.Model): - """The object that defines the structure of an Azure Synapse error response. 
+class SecretBase(msrest.serialization.Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureKeyVaultSecretReference, SecureString. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.synapse.artifacts.models.CloudError] + :param type: Required. Type of the secret.Constant filled by server. + :type type: str """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference', 'SecureString': 'SecureString'} } def __init__( self, - *, - code: str, - message: str, - target: Optional[str] = None, - details: Optional[List["CloudError"]] = None, **kwargs ): - super(CloudError, self).__init__(**kwargs) - self.code = code - self.message = message - self.target = target - self.details = details + super(SecretBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] -class CommonDataServiceForAppsEntityDataset(Dataset): - """The Common Data Service for Apps entity dataset. +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of the secret.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.synapse.artifacts.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. 
Type: string (or + Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The default value is the + latest version of the secret. Type: string (or Expression with resultType string). + :type secret_version: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, + store: "LinkedServiceReference", + secret_name: object, + secret_version: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CommonDataServiceForAppsEntity' - self.entity_name = entity_name + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.type = 'AzureKeyVaultSecret' # type: str + self.store = store + self.secret_name = secret_name + self.secret_version = secret_version -class CommonDataServiceForAppsLinkedService(LinkedService): - """Common Data Service for Apps linked service. +class AzureMariaDBLinkedService(LinkedService): + """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. @@ -4400,54 +4256,11 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Common Data Service for Apps - instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common - Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Common Data Service for Apps server. The - property is required for on-prem and not allowed for online. 
Type: string (or Expression with - resultType string). - :type host_name: object - :param port: The port of on-premises Common Data Service for Apps server. The property is - required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression - with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property - is required for on-line and not allowed for on-prem. Type: string (or Expression with - resultType string). - :type service_uri: object - :param organization_name: The organization name of the Common Data Service for Apps instance. - The property is required for on-prem and required for online when there are more than one - Common Data Service for Apps instances associated with the user. Type: string (or Expression - with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Common Data Service - for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Common Data Service for Apps instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: Password to access the Common Data Service for Apps instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
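# --- Usage sketch (editor's illustration, not part of the generated diff) ---
# Resolving the Azure Database for MariaDB password from Key Vault using the
# models in this region: AzureKeyVaultLinkedService, AzureKeyVaultSecretReference
# and AzureMariaDBLinkedService. The LinkedServiceReference constructor is not
# part of this diff; by analogy with the BigDataPoolReference removed earlier,
# it is assumed here to take a constant reference type plus a reference_name.
from azure.synapse.artifacts.models import (
    AzureKeyVaultLinkedService,
    AzureKeyVaultSecretReference,
    AzureMariaDBLinkedService,
    LinkedServiceReference,
)

# base_url is the only required type property of the Key Vault linked service.
akv = AzureKeyVaultLinkedService(base_url="https://myakv.vault.azure.net")  # example URL from the docstring

# store and secret_name are required; secret_version defaults to the latest
# version of the secret when omitted.
pwd_ref = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(
        type="LinkedServiceReference",  # assumed signature; not shown in this diff
        reference_name="MyKeyVault",    # hypothetical linked service name
    ),
    secret_name="mariadb-password",     # hypothetical secret name
)

mariadb_ls = AzureMariaDBLinkedService(
    connection_string="Server=myserver;Port=3306;Database=mydb;Uid=myuser;",  # hypothetical ODBC string
    pwd=pwd_ref,
)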
@@ -4456,8 +4269,6 @@ class CommonDataServiceForAppsLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { @@ -4467,151 +4278,90 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - host_name: Optional[object] = None, - port: Optional[object] = None, - service_uri: Optional[object] = None, - organization_name: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, - service_principal_credential: Optional["SecretBase"] = None, + connection_string: Optional[object] = None, + pwd: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CommonDataServiceForApps' - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_credential_type = service_principal_credential_type - self.service_principal_credential = service_principal_credential + super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, 
description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureMariaDB' # type: str + self.connection_string = connection_string + self.pwd = pwd self.encrypted_credential = encrypted_credential -class ConcurLinkedService(LinkedService): - """Concur Service linked service. +class AzureMariaDBSource(TabularSource): + """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param client_id: Required. Application client_id supplied by Concur App Management. - :type client_id: object - :param username: Required. The user name that you use to access Concur Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, - 'username': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - client_id: object, - username: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Concur' - self.client_id = client_id - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureMariaDBSource' # type: str + self.query = query -class ConcurObjectDataset(Dataset): - """Concur Service dataset. +class AzureMariaDBTableDataset(Dataset): + """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. 
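# --- Usage sketch (editor's illustration, not part of the generated diff) ---
# A minimal AzureMariaDBSource as defined just above. query_timeout and
# source_retry_wait follow the documented timespan pattern
# ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])); the query text is a
# hypothetical example.
from azure.synapse.artifacts.models import AzureMariaDBSource

source = AzureMariaDBSource(
    query="SELECT id, amount FROM sales",  # hypothetical query
    query_timeout="02:00:00",              # 2 hours, matches the documented pattern
    source_retry_count=3,
    source_retry_wait="00:00:30",
)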
@@ -4673,13 +4423,13 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ConcurObject' + super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureMariaDBTable' # type: str self.table_name = table_name -class ControlActivity(Activity): - """Base class for all control activities like IfCondition, ForEach , Until. +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. All required parameters must be populated in order to send to Azure. @@ -4696,6 +4446,25 @@ class ControlActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param global_parameters: Key,Value pairs to be passed to the Azure ML Batch Execution Service + endpoint. Keys must match the names of web service parameters defined in the published Azure ML + web service. Values will be passed in the GlobalParameters property of the Azure ML batch + execution request. + :type global_parameters: dict[str, object] + :param web_service_outputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This + information will be passed in the WebServiceOutputs property of the Azure ML batch + execution request. + :type web_service_outputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] + :param web_service_inputs: Key,Value pairs, mapping the names of Azure ML endpoint's Web + Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations. This + information will be passed in the WebServiceInputs property of the Azure ML batch execution + request.
+ :type web_service_inputs: dict[str, ~azure.synapse.artifacts.models.AzureMLWebServiceFile] """ _validation = { @@ -4710,6 +4479,11 @@ class ControlActivity(Activity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, } def __init__( @@ -4720,14 +4494,22 @@ def __init__( description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + global_parameters: Optional[Dict[str, object]] = None, + web_service_outputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, + web_service_inputs: Optional[Dict[str, "AzureMLWebServiceFile"]] = None, **kwargs ): - super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Container' + super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureMLBatchExecution' # type: str + self.global_parameters = global_parameters + self.web_service_outputs = web_service_outputs + self.web_service_inputs = web_service_inputs -class CopyActivity(ExecutionActivity): - """Copy activity. +class AzureMLExecutePipelineActivity(ExecutionActivity): + """Azure ML Execute Pipeline activity. All required parameters must be populated in order to send to Azure. @@ -4748,46 +4530,33 @@ class CopyActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param inputs: List of inputs for the activity. - :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param outputs: List of outputs for the activity. - :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] - :param source: Required. Copy activity source. - :type source: ~azure.synapse.artifacts.models.CopySource - :param sink: Required. Copy activity sink. - :type sink: ~azure.synapse.artifacts.models.CopySink - :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: object - :param enable_staging: Specifies whether to copy data via an interim staging. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type enable_staging: object - :param staging_settings: Specifies interim staging settings when EnableStaging is true. - :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings - :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to - avoid overloading the data store. 
Type: integer (or Expression with resultType integer), - minimum: 0. - :type parallel_copies: object - :param data_integration_units: Maximum number of data integration units that can be used to - perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. - :type data_integration_units: object - :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. - Type: boolean (or Expression with resultType boolean). - :type enable_skip_incompatible_row: object - :param redirect_incompatible_row_settings: Redirect incompatible row settings when - EnableSkipIncompatibleRow is true. - :type redirect_incompatible_row_settings: - ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings - :param preserve_rules: Preserve Rules. - :type preserve_rules: list[object] - :param preserve: Preserve rules. - :type preserve: list[object] + :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or + Expression with resultType string). + :type ml_pipeline_id: object + :param experiment_name: Run history experiment name of the pipeline run. This information will + be passed in the ExperimentName property of the published pipeline execution request. Type: + string (or Expression with resultType string). + :type experiment_name: object + :param ml_pipeline_parameters: Key,Value pairs to be passed to the published Azure ML pipeline + endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. + Values will be passed in the ParameterAssignments property of the published pipeline execution + request. Type: object with key value pairs (or Expression with resultType object). + :type ml_pipeline_parameters: object + :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be + passed in the ParentRunId property of the published pipeline execution request. Type: string + (or Expression with resultType string). + :type ml_parent_run_id: object + :param continue_on_step_failure: Whether to continue execution of other steps in the + PipelineRun if a step fails. This information will be passed in the continueOnStepFailure + property of the published pipeline execution request. Type: boolean (or Expression with + resultType boolean). 
+ :type continue_on_step_failure: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'source': {'required': True}, - 'sink': {'required': True}, + 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -4799,191 +4568,133 @@ class CopyActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, - 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, - 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, - 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, - 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, - 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, - 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, - 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, - 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, - 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, + 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, + 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } def __init__( self, *, name: str, - source: "CopySource", - sink: "CopySink", + ml_pipeline_id: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - inputs: Optional[List["DatasetReference"]] = None, - outputs: Optional[List["DatasetReference"]] = None, - translator: Optional[object] = None, - enable_staging: Optional[object] = None, - staging_settings: Optional["StagingSettings"] = None, - parallel_copies: Optional[object] = None, - data_integration_units: Optional[object] = None, - enable_skip_incompatible_row: Optional[object] = None, - redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, - preserve_rules: Optional[List[object]] = None, - preserve: Optional[List[object]] = None, + experiment_name: Optional[object] = None, + ml_pipeline_parameters: Optional[object] = None, + ml_parent_run_id: Optional[object] = None, + continue_on_step_failure: Optional[object] = None, **kwargs ): - super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Copy' - self.inputs = inputs - self.outputs = outputs - self.source = 
source - self.sink = sink - self.translator = translator - self.enable_staging = enable_staging - self.staging_settings = staging_settings - self.parallel_copies = parallel_copies - self.data_integration_units = data_integration_units - self.enable_skip_incompatible_row = enable_skip_incompatible_row - self.redirect_incompatible_row_settings = redirect_incompatible_row_settings - self.preserve_rules = preserve_rules - self.preserve = preserve - + super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureMLExecutePipeline' # type: str + self.ml_pipeline_id = ml_pipeline_id + self.experiment_name = experiment_name + self.ml_pipeline_parameters = ml_pipeline_parameters + self.ml_parent_run_id = ml_parent_run_id + self.continue_on_step_failure = continue_on_step_failure -class CopySink(msrest.serialization.Model): - """A copy activity sink. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . +class AzureMLLinkedService(LinkedService): + """Azure ML Studio Web Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Copy sink type.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param write_batch_size: Write batch size. Type: integer (or Expression with resultType - integer), minimum: 0. - :type write_batch_size: object - :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type write_batch_timeout: object - :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType - integer). - :type sink_retry_count: object - :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type sink_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the sink data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service + endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model endpoint. + :type api_key: ~azure.synapse.artifacts.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web + Service endpoint. Type: string (or Expression with resultType string). 
+ :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against the + ARM-based updateResourceEndpoint of an Azure ML Studio web service. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, - 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, - 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, - 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + ml_endpoint: object, + api_key: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, - write_batch_size: Optional[object] = None, - write_batch_timeout: Optional[object] = None, - sink_retry_count: Optional[object] = None, - sink_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + update_resource_endpoint: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(CopySink, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type: str = 'CopySink' - self.write_batch_size = write_batch_size - self.write_batch_timeout = write_batch_timeout - self.sink_retry_count = sink_retry_count - 
self.sink_retry_wait = sink_retry_wait - self.max_concurrent_connections = max_concurrent_connections - - -class CopySource(msrest.serialization.Model): - """A copy activity source. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Copy source type.Constant filled by server. - :type type: str - :param source_retry_count: Source retry count. Type: integer (or Expression with resultType - integer). - :type source_retry_count: object - :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type source_retry_wait: object - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, - 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - source_retry_count: Optional[object] = None, - source_retry_wait: Optional[object] = None, - max_concurrent_connections: Optional[object] = None, - **kwargs - ): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type: str = 'CopySource' - self.source_retry_count = source_retry_count - self.source_retry_wait = source_retry_wait - self.max_concurrent_connections = max_concurrent_connections + super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureML' # type: str + self.ml_endpoint = ml_endpoint + self.api_key = api_key + self.update_resource_endpoint = update_resource_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class CosmosDbLinkedService(LinkedService): - """Microsoft Azure Cosmos Database (CosmosDB) linked service. +class AzureMLServiceLinkedService(LinkedService): + """Azure ML Service linked service. All required parameters must be populated in order to send to Azure. @@ -5000,17 +4711,25 @@ class CosmosDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or + :param subscription_id: Required. Azure ML Service workspace subscription ID. 
Type: string (or Expression with resultType string). - :type account_endpoint: object - :param database: The name of the database. Type: string (or Expression with resultType string). - :type database: object - :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or - AzureKeyVaultSecretReference. - :type account_key: ~azure.synapse.artifacts.models.SecretBase + :type subscription_id: object + :param resource_group_name: Required. Azure ML Service workspace resource group name. Type: + string (or Expression with resultType string). + :type resource_group_name: object + :param ml_workspace_name: Required. Azure ML Service workspace name. Type: string (or + Expression with resultType string). + :type ml_workspace_name: object + :param service_principal_id: The ID of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType + string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against the + endpoint of a published Azure ML Service pipeline. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5019,6 +4738,9 @@ class CosmosDbLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'subscription_id': {'required': True}, + 'resource_group_name': {'required': True}, + 'ml_workspace_name': {'required': True}, } _attribute_map = { @@ -5028,171 +4750,342 @@ class CosmosDbLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + subscription_id: object, + resource_group_name: object, + ml_workspace_name: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - account_endpoint: Optional[object] = None, - database: Optional[object] = None, - account_key: 
Optional["SecretBase"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CosmosDb' - self.connection_string = connection_string - self.account_endpoint = account_endpoint - self.database = database - self.account_key = account_key + super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureMLService' # type: str + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.ml_workspace_name = ml_workspace_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant self.encrypted_credential = encrypted_credential -class CosmosDbMongoDbApiCollectionDataset(Dataset): - """The CosmosDB (MongoDB API) database dataset. +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: - string (or Expression with resultType string). - :type collection: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param trained_model_name: Required. 
Name of the Trained Model module in the Web Service + experiment to be updated. Type: string (or Expression with resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage linked service + holding the .ilearner file that will be uploaded by the update operation. + :type trained_model_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in trainedModelLinkedService + to represent the .ilearner file that will be uploaded by the update operation. Type: string + (or Expression with resultType string). + :type trained_model_file_path: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - collection: object, + name: str, + trained_model_name: object, + trained_model_linked_service_name: "LinkedServiceReference", + trained_model_file_path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, **kwargs ): - super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CosmosDbMongoDbApiCollection' - self.collection = collection + super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, 
linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.type = 'AzureMLUpdateResource' # type: str
+ self.trained_model_name = trained_model_name
+ self.trained_model_linked_service_name = trained_model_linked_service_name
+ self.trained_model_file_path = trained_model_file_path
 
 
-class CosmosDbMongoDbApiLinkedService(LinkedService):
- """Linked service for CosmosDB (MongoDB API) data source.
+class AzureMLWebServiceFile(msrest.serialization.Model):
+ """Azure ML WebService Input/Output file.
 
 All required parameters must be populated in order to send to Azure.
 
- :param additional_properties: Unmatched properties from the message are deserialized to this
- collection.
- :type additional_properties: dict[str, object]
- :param type: Required. Type of linked service.Constant filled by server.
- :type type: str
- :param connect_via: The integration runtime reference.
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
- :param description: Linked service description.
- :type description: str
- :param parameters: Parameters for linked service.
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
- :param annotations: List of tags that can be used for describing the linked service.
- :type annotations: list[object]
- :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string,
- SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or
- AzureKeyVaultSecretReference.
- :type connection_string: object
- :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to
- access. Type: string (or Expression with resultType string).
- :type database: object
+ :param file_path: Required. The relative file path, including container name, in the Azure Blob
+ Storage specified by the LinkedService. Type: string (or Expression with resultType string).
+ :type file_path: object
+ :param linked_service_name: Required. Reference to the Azure Storage LinkedService where the
+ Azure ML WebService Input/Output file is located.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
 """
 
 _validation = {
- 'type': {'required': True},
- 'connection_string': {'required': True},
- 'database': {'required': True},
+ 'file_path': {'required': True},
+ 'linked_service_name': {'required': True},
 }
 
 _attribute_map = {
- 'additional_properties': {'key': '', 'type': '{object}'},
- 'type': {'key': 'type', 'type': 'str'},
- 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'file_path': {'key': 'filePath', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ file_path: object,
+ linked_service_name: "LinkedServiceReference",
+ **kwargs
+ ):
+ super(AzureMLWebServiceFile, self).__init__(**kwargs)
+ self.file_path = file_path
+ self.linked_service_name = linked_service_name
+
+
+class AzureMySqlLinkedService(LinkedService):
+ """Azure MySQL database linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of linked service.Constant filled by server.
+ :type type: str
+ :param connect_via: The integration runtime reference.
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, connection_string: object, - database: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CosmosDbMongoDbApi' + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureMySql' # type: str self.connection_string = connection_string - self.database = database + self.password = password + self.encrypted_credential = encrypted_credential -class CosmosDbSqlApiCollectionDataset(Dataset): - """Microsoft Azure CosmosDB (SQL API) Collection dataset. +class AzureMySqlSink(CopySink): + """A copy activity Azure MySql sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. 
+ :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureMySqlSink' # type: str + self.pre_copy_script = pre_copy_script + + +class AzureMySqlSource(TabularSource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureMySqlSource' # type: str + self.query = query + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. @@ -5218,15 +5111,17 @@ class CosmosDbSqlApiCollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or - Expression with resultType string). - :type collection_name: object + :param table_name: The Azure MySQL database table name. Type: string (or Expression with + resultType string). + :type table_name: object + :param table: The name of Azure MySQL database table. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { @@ -5239,14 +5134,14 @@ class CosmosDbSqlApiCollectionDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -5254,15 +5149,18 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CosmosDbSqlApiCollection' - self.collection_name = collection_name + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureMySqlTable' # type: str + self.table_name = table_name + self.table = table -class CouchbaseLinkedService(LinkedService): - """Couchbase server linked service. +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -5282,8 +5180,8 @@ class CouchbaseLinkedService(LinkedService): :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in connection string. - :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
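As a quick smoke test of the regenerated Azure MySQL surface above, the renamed models compose as in the following minimal sketch. The connection string, reference name, and table are placeholders, and LinkedServiceReference (not touched by this diff) is assumed to take its usual required type/reference_name keywords from the same models package.

# Minimal sketch (not part of the diff): composing the regenerated Azure MySQL
# models from azure.synapse.artifacts. All names and values are placeholders.
from azure.synapse.artifacts.models import (
    AzureMySqlLinkedService,
    AzureMySqlSource,
    AzureMySqlTableDataset,
    LinkedServiceReference,
)

# connection_string is typed as `object`, so a plain string (or an Expression
# dict) is accepted; the password can instead be factored out into the
# `password` AzureKeyVaultSecretReference shown in the diff.
mysql_ls = AzureMySqlLinkedService(
    connection_string="Server=myserver;Database=mydb;UID=myuser",  # placeholder
)

# Datasets point at the linked service by name via a reference object.
mysql_ds = AzureMySqlTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",      # assumed required discriminator
        reference_name="MyAzureMySqlLinkedService",  # placeholder
    ),
    table_name="mytable",  # serialized under typeProperties.tableName
)

# Copy source: `query` is likewise `object`, so string or Expression both work.
mysql_src = AzureMySqlSource(query="SELECT 1")

Per the _attribute_map entries above, table_name lands under typeProperties.tableName on the wire, while query serializes at the top level of the source.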
@@ -5302,7 +5200,7 @@ class CouchbaseLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5315,299 +5213,407 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, connection_string: Optional[object] = None, - cred_string: Optional["AzureKeyVaultSecretReference"] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Couchbase' + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzurePostgreSql' # type: str self.connection_string = connection_string - self.cred_string = cred_string + self.password = password self.encrypted_credential = encrypted_credential -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. +class AzurePostgreSqlSink(CopySink): + """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, **kwargs ): - super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'CouchbaseTable' - self.table_name = table_name + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzurePostgreSqlSink' # type: str + self.pre_copy_script = pre_copy_script -class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for creating data flow debug session. +class AzurePostgreSqlSource(TabularSource): + """A copy activity Azure PostgreSQL source. 
- :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param existing_cluster_id: The ID of existing Databricks cluster. - :type existing_cluster_id: str - :param cluster_timeout: Timeout setting for Databricks cluster. - :type cluster_timeout: int - :param new_cluster_name: The name of new Databricks cluster. - :type new_cluster_name: str - :param new_cluster_node_type: The type of new Databricks cluster. - :type new_cluster_node_type: str - :param data_bricks_linked_service: Data bricks linked service. - :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, - 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, - 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, - 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, - 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - data_flow_name: Optional[str] = None, - existing_cluster_id: Optional[str] = None, - cluster_timeout: Optional[int] = None, - new_cluster_name: Optional[str] = None, - new_cluster_node_type: Optional[str] = None, - data_bricks_linked_service: Optional["LinkedServiceResource"] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.data_flow_name = data_flow_name - self.existing_cluster_id = existing_cluster_id - self.cluster_timeout = cluster_timeout - self.new_cluster_name = new_cluster_name - self.new_cluster_node_type = new_cluster_node_type - self.data_bricks_linked_service = data_bricks_linked_service + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzurePostgreSqlSource' # type: str + self.query = query -class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): - """Response body structure for creating data flow debug session. +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. - :param session_id: The ID of data flow debug session. - :type session_id: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name of the Azure PostgreSQL database which includes both schema + and table. Type: string (or Expression with resultType string). + :type table_name: object + :param table: The table name of the Azure PostgreSQL database. Type: string (or Expression with + resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Azure PostgreSQL database. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, *, - session_id: Optional[str] = None, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) - self.session_id = session_id + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzurePostgreSqlTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema -class CreateRunResponse(msrest.serialization.Model): - """Response body with a run identifier. +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. All required parameters must be populated in order to send to Azure. - :param run_id: Required. Identifier of a run. - :type run_id: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. 
Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object """ _validation = { - 'run_id': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, } def __init__( self, *, - run_id: str, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, **kwargs ): - super(CreateRunResponse, self).__init__(**kwargs) - self.run_id = run_id + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureQueueSink' # type: str -class CustomActivity(ExecutionActivity): - """Custom activity type. +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
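The AzureQueueSink above adds no queue-specific fields; the subclass exists to pin the `type` discriminator, which is what satisfies the required `type` entry in `_validation`. A one-liner to illustrate, hedged as before on the import path:

```python
from azure.synapse.artifacts.models import AzureQueueSink

sink = AzureQueueSink(max_concurrent_connections=4)
assert sink.type == 'AzureQueueSink'  # set by __init__, not by the caller
```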
+ :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. Command for custom activity Type: string (or Expression with - resultType string). - :type command: object - :param resource_linked_service: Resource linked service reference. - :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for resource files Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_objects: Reference objects. - :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject - :param extended_properties: User defined property bag. There is no restriction on the keys or - values that can be used. The user specified custom activity has the full responsibility to - consume and interpret the content defined. - :type extended_properties: dict[str, object] - :param retention_time_in_days: The retention time for the files submitted for custom activity. - Type: double (or Expression with resultType double). - :type retention_time_in_days: object + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param index_name: Required. The name of the Azure Search Index. Type: string (or Expression + with resultType string). 
+ :type index_name: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, + 'linked_service_name': {'required': True}, + 'index_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, - 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, - 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, } def __init__( self, *, - name: str, - command: object, + linked_service_name: "LinkedServiceReference", + index_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - resource_linked_service: Optional["LinkedServiceReference"] = None, - folder_path: Optional[object] = None, - reference_objects: Optional["CustomActivityReferenceObject"] = None, - extended_properties: Optional[Dict[str, object]] = None, - retention_time_in_days: Optional[object] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, **kwargs ): - super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Custom' - self.command = command - self.resource_linked_service = resource_linked_service - self.folder_path = folder_path - self.reference_objects = reference_objects - self.extended_properties = extended_properties - self.retention_time_in_days = retention_time_in_days + super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSearchIndex' # type: str + self.index_name = index_name -class 
CustomActivityReferenceObject(msrest.serialization.Model): - """Reference objects for custom activity. +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. - :param linked_services: Linked service references. - :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param datasets: Dataset references. - :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Specify the write behavior when upserting documents into Azure Search + Index. Possible values include: "Merge", "Upload". 
+ :type write_behavior: str or ~azure.synapse.artifacts.models.AzureSearchIndexWriteBehaviorType """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__( self, *, - linked_services: Optional[List["LinkedServiceReference"]] = None, - datasets: Optional[List["DatasetReference"]] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): - super(CustomActivityReferenceObject, self).__init__(**kwargs) - self.linked_services = linked_services - self.datasets = datasets + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureSearchIndexSink' # type: str + self.write_behavior = write_behavior -class CustomDataSourceLinkedService(LinkedService): - """Custom linked service. +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. All required parameters must be populated in order to send to Azure. @@ -5624,13 +5630,20 @@ class CustomDataSourceLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param type_properties: Required. Custom linked service properties. - :type type_properties: object + :param url: Required. URL for Azure Search service. Type: string (or Expression with resultType + string). + :type url: object + :param key: Admin Key for Azure Search service. + :type key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
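Since `write_behavior` on the AzureSearchIndexSink above is typed `Union[str, AzureSearchIndexWriteBehaviorType]`, the plain string form works as well as the enum member; values other than "Merge" or "Upload" would be rejected by the service rather than by the model. For instance:

```python
from azure.synapse.artifacts.models import AzureSearchIndexSink

# "Upload" replaces documents wholesale; "Merge" patches matching keys.
sink = AzureSearchIndexSink(write_behavior="Upload")
```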
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'type_properties': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -5640,1242 +5653,1763 @@ class CustomDataSourceLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - type_properties: object, + url: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + key: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'CustomDataSource' - self.type_properties = type_properties + super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSearch' # type: str + self.url = url + self.key = key + self.encrypted_credential = encrypted_credential -class DatabricksNotebookActivity(ExecutionActivity): - """DatabricksNotebook activity. +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks - Workspace. This path must begin with a slash. Type: string (or Expression with resultType - string). 
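The `key` on the AzureSearchLinkedService completed above is a SecretBase, so any concrete subclass can be supplied; a sketch using SecureString, assuming its usual `value` constructor argument (URL and key are placeholders):

```python
from azure.synapse.artifacts.models import AzureSearchLinkedService, SecureString

search_ls = AzureSearchLinkedService(
    url="https://mysearch.search.windows.net",  # required per _validation
    key=SecureString(value="<admin-key>"),      # any SecretBase subclass works
)
```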
- :type notebook_path: object - :param base_parameters: Base parameters to be used for each run of this job.If the notebook - takes a parameter that is not specified, the default value from the notebook will be used. - :type base_parameters: dict[str, object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Database. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Database. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'notebook_path': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, - 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - name: str, - notebook_path: object, + connection_string: object, additional_properties: 
Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - base_parameters: Optional[Dict[str, object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksNotebook' - self.notebook_path = notebook_path - self.base_parameters = base_parameters - self.libraries = libraries + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlDatabase' # type: str + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Activity description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing the main method to be - executed. This class must be contained in a JAR provided as a library. Type: string (or - Expression with resultType string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. 
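A typical use of the AzureSqlDatabaseLinkedService above keeps the password out of the connection string and pulls it from Key Vault instead. This sketch assumes AzureKeyVaultSecretReference's usual `store`/`secret_name` arguments and an existing Key Vault linked service named 'MyKeyVaultLS' (hypothetical):

```python
from azure.synapse.artifacts.models import (
    AzureKeyVaultSecretReference,
    AzureSqlDatabaseLinkedService,
    LinkedServiceReference,
)

kv_store = LinkedServiceReference(
    type="LinkedServiceReference", reference_name="MyKeyVaultLS")

sql_ls = AzureSqlDatabaseLinkedService(
    connection_string="Server=tcp:myserver.database.windows.net;Database=mydb;User ID=admin;",
    password=AzureKeyVaultSecretReference(store=kv_store, secret_name="sql-password"),
)
```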
- :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] - """ - - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Data Warehouse. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Data Warehouse. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - name: str, - main_class_name: object, + connection_string: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None,
description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - parameters: Optional[List[object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksSparkJar' - self.main_class_name = main_class_name - self.parameters = parameters - self.libraries = libraries + super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlDW' # type: str + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Dataset description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. DBFS paths are - supported. Type: string (or Expression with resultType string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the Python file. 
- :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that will execute the job. - :type libraries: list[dict[str, object]] + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Data Warehouse. Type: string (or Expression with + resultType string). + :type table: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'python_file': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, - name: str, - python_file: object, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - parameters: Optional[List[object]] = None, - libraries: Optional[List[Dict[str, object]]] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, 
user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DatabricksSparkPython' - self.python_file = python_file - self.parameters = parameters - self.libraries = libraries - + super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlDWTable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class DataFlow(msrest.serialization.Model): - """Azure Synapse nested object which contains a flow with data movements and transformations. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MappingDataFlow. +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: The description of the data flow. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param annotations: List of tags that can be used for describing the data flow. + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.DataFlowFolder + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to authenticate against Azure + SQL Managed Instance. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to authenticate against + Azure SQL Managed Instance. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
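As the AzureSqlDWTableDataset docstring above notes, `table_name` is being retired in favor of the split `schema`/`table` pair, which serialize to `typeProperties.schema` and `typeProperties.table`. A sketch of the preferred form (linked service name hypothetical):

```python
from azure.synapse.artifacts.models import AzureSqlDWTableDataset, LinkedServiceReference

dw_table = AzureSqlDWTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="AzureSqlDWLS"),
    schema_type_properties_schema="dbo",  # preferred over the retired table_name
    table="FactSales",
)
```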
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - } - - _subtype_map = { - 'type': {'MappingDataFlow': 'MappingDataFlow'} + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DataFlowFolder"] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DataFlow, self).__init__(**kwargs) - self.type: Optional[str] = None - self.description = description - self.annotations = annotations - self.folder = folder + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureSqlMI' # type: str + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential -class DataFlowDebugCommandRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. - :param session_id: Required. The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param command_name: The command name. - :type command_name: str - :param command_payload: Required. The command payload object. - :type command_payload: object - """ - - _validation = { - 'session_id': {'required': True}, - 'command_payload': {'required': True}, + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. 
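The AzureSqlMILinkedService completed above carries the same service-principal trio as its SQL Database and DW siblings: a plain `service_principal_id`, a SecretBase `service_principal_key`, and a `tenant`. A sketch with made-up identifiers, again assuming SecureString's `value` argument:

```python
from azure.synapse.artifacts.models import AzureSqlMILinkedService, SecureString

mi_ls = AzureSqlMILinkedService(
    connection_string="Server=mymi.public.abc123.database.windows.net,3342;Database=mydb;",
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<client-secret>"),
    tenant="contoso.onmicrosoft.com",
)
```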
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL Managed Instance. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'command_name': {'key': 'commandName', 'type': 'str'}, - 'command_payload': {'key': 'commandPayload', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, - session_id: str, - command_payload: object, - data_flow_name: Optional[str] = None, - command_name: Optional[str] = None, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(DataFlowDebugCommandRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name - self.command_name = command_name - self.command_payload = command_payload + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, 
description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlMITable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class DataFlowDebugCommandResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
+ :type table_option: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - status: Optional[str] = None, - data: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, **kwargs ): - super(DataFlowDebugCommandResponse, self).__init__(**kwargs) - self.status = status - self.data = data + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureSqlSink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option -class DataFlowDebugPackage(msrest.serialization.Model): - """Request body structure for starting data flow debug session. +class AzureSqlSource(TabularSource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow: Data flow instance. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource - :param datasets: List of datasets. - :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] - :param linked_services: List of linked services. 
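The stored-procedure fields on the AzureSqlSink above work together: the copy activity hands each batch to `sql_writer_stored_procedure_name` as a table-valued parameter whose SQL type is `sql_writer_table_type` and whose parameter name is `stored_procedure_table_type_parameter_name`. A sketch with illustrative names, assuming StoredProcedureParameter's usual `value`/`type` arguments:

```python
from azure.synapse.artifacts.models import AzureSqlSink, StoredProcedureParameter

sink = AzureSqlSink(
    sql_writer_stored_procedure_name="spUpsertCustomers",
    sql_writer_table_type="CustomerTableType",
    stored_procedure_table_type_parameter_name="Customers",
    stored_procedure_parameters={
        "BatchDate": StoredProcedureParameter(value="2020-09-15", type="DateTime"),
    },
)
```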
- :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] - :param staging: Staging info for debug session. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param debug_settings: Data flow debug settings. - :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, - 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, - 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, - 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - session_id: Optional[str] = None, - data_flow: Optional["DataFlowDebugResource"] = None, - datasets: Optional[List["DatasetDebugResource"]] = None, - linked_services: Optional[List["LinkedServiceDebugResource"]] = None, - staging: Optional["DataFlowStagingInfo"] = None, - debug_settings: Optional["DataFlowDebugPackageDebugSettings"] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + produce_additional_types: Optional[object] = None, **kwargs ): - super(DataFlowDebugPackage, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.session_id = session_id - self.data_flow = data_flow - self.datasets = datasets - self.linked_services = linked_services - self.staging = staging - self.debug_settings = debug_settings + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureSqlSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types -class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): - """Data flow debug settings. +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. 
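As the AzureSqlSource docstring above says, `sql_reader_stored_procedure_name` cannot be combined with `sql_reader_query`; pick one per source. The stored-procedure variant, with hypothetical names:

```python
from azure.synapse.artifacts.models import AzureSqlSource, StoredProcedureParameter

source = AzureSqlSource(
    sql_reader_stored_procedure_name="spGetChangedRows",  # excludes sql_reader_query
    stored_procedure_parameters={
        "Since": StoredProcedureParameter(value="2020-09-01", type="DateTime"),
    },
)
```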
- :type dataset_parameters: object + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the Azure SQL database. Type: string + (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Azure SQL database. Type: string (or Expression with + resultType string). + :type table: object """ - _attribute_map = { - 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } - def __init__( - self, - *, - source_settings: Optional[List["DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, - **kwargs - ): - super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) - self.source_settings = source_settings - self.parameters = parameters - self.dataset_parameters = dataset_parameters - - -class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): - """Request body structure for data flow preview data. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request. 
- :type row_limits: int - """ - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, - session_id: Optional[str] = None, - data_flow_name: Optional[str] = None, - stream_name: Optional[str] = None, - row_limits: Optional[int] = None, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name - self.stream_name = stream_name - self.row_limits = row_limits + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureSqlTable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class DataFlowDebugQueryResponse(msrest.serialization.Model): - """Response body structure of data flow query for data preview, statistics or expression preview. +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. - :param run_id: The run ID of data flow debug session. - :type run_id: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. 
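# --- Editor's sketch, not part of the generated diff: the AzureSqlTableDataset
# above requires a LinkedServiceReference. Assumption: LinkedServiceReference
# takes its discriminator as a required constructor argument, the same way
# BigDataPoolReference does after this regeneration; names are placeholders.
from azure.synapse.artifacts.models import AzureSqlTableDataset, LinkedServiceReference

example_sql_table = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyAzureSqlDatabase"
    ),
    schema_type_properties_schema="dbo",  # maps to typeProperties.schema
    table="Orders",                       # maps to typeProperties.table
)
# ---------------------------------------------------------------------------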
It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { - 'run_id': {'key': 'runId', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, *, - run_id: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + account_key: Optional["AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[object] = None, + sas_token: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[str] = None, **kwargs ): - super(DataFlowDebugQueryResponse, self).__init__(**kwargs) - self.run_id = run_id + super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureStorage' # type: str + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential -class SubResourceDebugResource(msrest.serialization.Model): - """Azure Synapse nested debug resource. +class AzureTableDataset(Dataset): + """The Azure Table storage dataset. - :param name: The resource name. - :type name: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. 
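# --- Editor's sketch, not part of the generated diff: per the docstring above,
# connectionString and sasUri are mutually exclusive, so set exactly one of
# them. The connection string below is a redacted placeholder.
from azure.synapse.artifacts.models import AzureStorageLinkedService

example_storage_ls = AzureStorageLinkedService(
    connection_string="DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>",
)
# ---------------------------------------------------------------------------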
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: Required. The table name of the Azure Table storage. Type: string (or + Expression with resultType string). + :type table_name: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, *, - name: Optional[str] = None, + linked_service_name: "LinkedServiceReference", + table_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, **kwargs ): - super(SubResourceDebugResource, self).__init__(**kwargs) - self.name = name + super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureTable' # type: str + self.table_name = table_name -class DataFlowDebugResource(SubResourceDebugResource): - """Data flow debug resource. +class AzureTableSink(CopySink): + """A copy activity Azure Table sink. All required parameters must be populated in order to send to Azure. - :param name: The resource name. - :type name: str - :param properties: Required. Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. 
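# --- Editor's sketch, not part of the generated diff: attribute-map keys of
# the form 'typeProperties.tableName' tell msrest to nest the value under
# "typeProperties" on the wire; Model.serialize() shows the effect. Assumption:
# LinkedServiceReference takes its discriminator explicitly, as
# BigDataPoolReference does after this regeneration.
from azure.synapse.artifacts.models import AzureTableDataset, LinkedServiceReference

example_table_ds = AzureTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyTableStorage"
    ),
    table_name="orders",
)
body = example_table_ds.serialize()
# body == {"type": "AzureTable",
#          "linkedServiceName": {"type": "LinkedServiceReference",
#                                "referenceName": "MyTableStorage"},
#          "typeProperties": {"tableName": "orders"}}
# ---------------------------------------------------------------------------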
Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: + string (or Expression with resultType string). + :type azure_table_default_partition_key_value: object + :param azure_table_partition_key_name: Azure Table partition key name. Type: string (or + Expression with resultType string). + :type azure_table_partition_key_name: object + :param azure_table_row_key_name: Azure Table row key name. Type: string (or Expression with + resultType string). + :type azure_table_row_key_name: object + :param azure_table_insert_type: Azure Table insert type. Type: string (or Expression with + resultType string). + :type azure_table_insert_type: object """ _validation = { - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, + 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, + 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, + 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } def __init__( self, *, - properties: "DataFlow", - name: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + azure_table_default_partition_key_value: Optional[object] = None, + azure_table_partition_key_name: Optional[object] = None, + azure_table_row_key_name: Optional[object] = None, + azure_table_insert_type: Optional[object] = None, **kwargs ): - super(DataFlowDebugResource, self).__init__(name=name, **kwargs) - self.properties = properties + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, 
max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureTableSink' # type: str + self.azure_table_default_partition_key_value = azure_table_default_partition_key_value + self.azure_table_partition_key_name = azure_table_partition_key_name + self.azure_table_row_key_name = azure_table_row_key_name + self.azure_table_insert_type = azure_table_insert_type -class DataFlowDebugResultResponse(msrest.serialization.Model): - """Response body structure of data flow result for data preview, statistics or expression preview. +class AzureTableSource(TabularSource): + """A copy activity Azure Table source. - :param status: The run status of data preview, statistics or expression preview. - :type status: str - :param data: The result data of data preview, statistics or expression preview. - :type data: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'data': {'key': 'data', 'type': 'str'}, - } - - def __init__( - self, - *, - status: Optional[str] = None, - data: Optional[str] = None, - **kwargs - ): - super(DataFlowDebugResultResponse, self).__init__(**kwargs) - self.status = status - self.data = data - - -class DataFlowDebugSessionInfo(msrest.serialization.Model): - """Data flow debug session info. + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param data_flow_name: The name of the data flow. - :type data_flow_name: str - :param compute_type: Compute type of the cluster. - :type compute_type: str - :param core_count: Core count of the cluster. - :type core_count: int - :param node_count: Node count of the cluster. (deprecated property). - :type node_count: int - :param integration_runtime_name: Attached integration runtime name of data flow debug session. - :type integration_runtime_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param start_time: Start time of data flow debug session. - :type start_time: str - :param time_to_live_in_minutes: Compute type of the cluster. - :type time_to_live_in_minutes: int - :param last_activity_time: Last activity time of data flow debug session. 
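# --- Editor's sketch, not part of the generated diff: an AzureTableSink as
# defined above. "merge" is the usual insert type for Azure Table sinks, but
# treat the concrete values here as placeholders.
from azure.synapse.artifacts.models import AzureTableSink

example_table_sink = AzureTableSink(
    azure_table_partition_key_name="region",
    azure_table_row_key_name="orderId",
    azure_table_insert_type="merge",
    write_batch_size=100,  # integer (or Expression), minimum: 0
)
# ---------------------------------------------------------------------------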
- :type last_activity_time: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, - 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - data_flow_name: Optional[str] = None, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - node_count: Optional[int] = None, - integration_runtime_name: Optional[str] = None, - session_id: Optional[str] = None, - start_time: Optional[str] = None, - time_to_live_in_minutes: Optional[int] = None, - last_activity_time: Optional[str] = None, - **kwargs - ): - super(DataFlowDebugSessionInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.data_flow_name = data_flow_name - self.compute_type = compute_type - self.core_count = core_count - self.node_count = node_count - self.integration_runtime_name = integration_runtime_name - self.session_id = session_id - self.start_time = start_time - self.time_to_live_in_minutes = time_to_live_in_minutes - self.last_activity_time = last_activity_time - - -class DataFlowDebugStatisticsRequest(msrest.serialization.Model): - """Request body structure for data flow statistics. - - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param columns: List of column names. - :type columns: list[str] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param azure_table_source_query: Azure Table source query. Type: string (or Expression with + resultType string). + :type azure_table_source_query: object + :param azure_table_source_ignore_table_not_found: Azure Table source ignore table not found. + Type: boolean (or Expression with resultType boolean). 
+ :type azure_table_source_ignore_table_not_found: object """ - _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'columns': {'key': 'columns', 'type': '[str]'}, + _validation = { + 'type': {'required': True}, } - def __init__( - self, - *, - session_id: Optional[str] = None, - data_flow_name: Optional[str] = None, - stream_name: Optional[str] = None, - columns: Optional[List[str]] = None, - **kwargs - ): - super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name - self.stream_name = stream_name - self.columns = columns - - -class DataFlowFolder(msrest.serialization.Model): - """The folder that this data flow is in. If not specified, Data flow will appear at the root level. - - :param name: The name of the folder that this data flow is in. - :type name: str - """ - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, + 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } def __init__( self, *, - name: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + azure_table_source_query: Optional[object] = None, + azure_table_source_ignore_table_not_found: Optional[object] = None, **kwargs ): - super(DataFlowFolder, self).__init__(**kwargs) - self.name = name + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'AzureTableSource' # type: str + self.azure_table_source_query = azure_table_source_query + self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found -class DataFlowListResponse(msrest.serialization.Model): - """A list of data flow resources. +class AzureTableStorageLinkedService(LinkedService): + """The azure table storage linked service. All required parameters must be populated in order to send to Azure. - :param value: Required. List of data flows. - :type value: list[~azure.synapse.artifacts.models.DataFlowResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. 
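# --- Editor's sketch, not part of the generated diff: an AzureTableSource as
# defined above, with an illustrative OData-style filter query.
from azure.synapse.artifacts.models import AzureTableSource

example_table_source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'Q1'",
    azure_table_source_ignore_table_not_found=True,
)
# ---------------------------------------------------------------------------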
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: str """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, *, - value: List["DataFlowResource"], - next_link: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + account_key: Optional["AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[object] = None, + sas_token: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[str] = None, **kwargs ): - super(DataFlowListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - + super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureTableStorage' # type: str + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.encrypted_credential = encrypted_credential -class DataFlowReference(msrest.serialization.Model): - """Data flow reference type. 
- Variables are only populated by the server, and will be ignored when sending a request. +class BigDataPoolReference(msrest.serialization.Model): + """Big data pool reference. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :ivar type: Required. Data flow reference type. Default value: "DataFlowReference". - :vartype type: str - :param reference_name: Required. Reference data flow name. + :param type: Required. Big data pool reference type. Possible values include: + "BigDataPoolReference". + :type type: str or ~azure.synapse.artifacts.models.BigDataPoolReferenceType + :param reference_name: Required. Reference big data pool name. :type reference_name: str - :param dataset_parameters: Reference data flow parameters from dataset. - :type dataset_parameters: object """ _validation = { - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, 'reference_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } - type = "DataFlowReference" - def __init__( self, *, + type: Union[str, "BigDataPoolReferenceType"], reference_name: str, - additional_properties: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, **kwargs ): - super(DataFlowReference, self).__init__(**kwargs) - self.additional_properties = additional_properties + super(BigDataPoolReference, self).__init__(**kwargs) + self.type = type self.reference_name = reference_name - self.dataset_parameters = dataset_parameters -class SubResource(msrest.serialization.Model): - """Azure Synapse nested resource, which belongs to a workspace. +class TrackedResource(Resource): + """The resource model definition for a ARM tracked top level resource. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. 
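# --- Editor's sketch, not part of the generated diff: after this regeneration
# the reference type is a required constructor argument rather than a class
# constant, so the discriminator is passed explicitly.
from azure.synapse.artifacts.models import BigDataPoolReference

example_pool_ref = BigDataPoolReference(
    type="BigDataPoolReference",   # only documented BigDataPoolReferenceType value
    reference_name="mysparkpool",  # placeholder pool name
)
# ---------------------------------------------------------------------------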
+ :type location: str """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'etag': {'readonly': True}, + 'location': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, } def __init__( self, - **kwargs + *, + location: str, + tags: Optional[Dict[str, str]] = None, + **kwargs ): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location -class DataFlowResource(SubResource): - """Data flow resource type. +class BigDataPoolResourceInfo(TrackedResource): + """A Big Data pool. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param provisioning_state: The state of the Big Data pool. + :type provisioning_state: str + :param auto_scale: Auto-scaling properties. + :type auto_scale: ~azure.synapse.artifacts.models.AutoScaleProperties + :param creation_date: The time when the Big Data pool was created. + :type creation_date: ~datetime.datetime + :param auto_pause: Auto-pausing properties. + :type auto_pause: ~azure.synapse.artifacts.models.AutoPauseProperties + :param is_compute_isolation_enabled: Whether compute isolation is required or not. + :type is_compute_isolation_enabled: bool + :param spark_events_folder: The Spark events folder. + :type spark_events_folder: str + :param node_count: The number of nodes in the Big Data pool. + :type node_count: int + :param library_requirements: Library version requirements. + :type library_requirements: ~azure.synapse.artifacts.models.LibraryRequirements + :param spark_version: The Apache Spark version. + :type spark_version: str + :param default_spark_log_folder: The default folder where Spark logs will be written. + :type default_spark_log_folder: str + :param node_size: The level of compute power that each node in the Big Data pool has. Possible + values include: "None", "Small", "Medium", "Large", "XLarge", "XXLarge". + :type node_size: str or ~azure.synapse.artifacts.models.NodeSize + :param node_size_family: The kind of nodes that the Big Data pool provides. Possible values + include: "None", "MemoryOptimized". 
+ :type node_size_family: str or ~azure.synapse.artifacts.models.NodeSizeFamily """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'location': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataFlow'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'auto_scale': {'key': 'properties.autoScale', 'type': 'AutoScaleProperties'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, + 'auto_pause': {'key': 'properties.autoPause', 'type': 'AutoPauseProperties'}, + 'is_compute_isolation_enabled': {'key': 'properties.isComputeIsolationEnabled', 'type': 'bool'}, + 'spark_events_folder': {'key': 'properties.sparkEventsFolder', 'type': 'str'}, + 'node_count': {'key': 'properties.nodeCount', 'type': 'int'}, + 'library_requirements': {'key': 'properties.libraryRequirements', 'type': 'LibraryRequirements'}, + 'spark_version': {'key': 'properties.sparkVersion', 'type': 'str'}, + 'default_spark_log_folder': {'key': 'properties.defaultSparkLogFolder', 'type': 'str'}, + 'node_size': {'key': 'properties.nodeSize', 'type': 'str'}, + 'node_size_family': {'key': 'properties.nodeSizeFamily', 'type': 'str'}, } def __init__( self, *, - properties: "DataFlow", - **kwargs - ): - super(DataFlowResource, self).__init__(**kwargs) - self.properties = properties - + location: str, + tags: Optional[Dict[str, str]] = None, + provisioning_state: Optional[str] = None, + auto_scale: Optional["AutoScaleProperties"] = None, + creation_date: Optional[datetime.datetime] = None, + auto_pause: Optional["AutoPauseProperties"] = None, + is_compute_isolation_enabled: Optional[bool] = None, + spark_events_folder: Optional[str] = None, + node_count: Optional[int] = None, + library_requirements: Optional["LibraryRequirements"] = None, + spark_version: Optional[str] = None, + default_spark_log_folder: Optional[str] = None, + node_size: Optional[Union[str, "NodeSize"]] = None, + node_size_family: Optional[Union[str, "NodeSizeFamily"]] = None, + **kwargs + ): + super(BigDataPoolResourceInfo, self).__init__(tags=tags, location=location, **kwargs) + self.provisioning_state = provisioning_state + self.auto_scale = auto_scale + self.creation_date = creation_date + self.auto_pause = auto_pause + self.is_compute_isolation_enabled = is_compute_isolation_enabled + self.spark_events_folder = spark_events_folder + self.node_count = node_count + self.library_requirements = library_requirements + self.spark_version = spark_version + self.default_spark_log_folder = default_spark_log_folder + self.node_size = node_size + self.node_size_family = node_size_family -class Transformation(msrest.serialization.Model): - """A data flow transformation. - All required parameters must be populated in order to send to Azure. +class BigDataPoolResourceInfoListResult(msrest.serialization.Model): + """Collection of Big Data pool information. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of Big Data pools. 
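# --- Editor's sketch, not part of the generated diff: BigDataPoolResourceInfo
# inherits the required 'location' from TrackedResource; everything else is
# optional. Values are placeholders.
from azure.synapse.artifacts.models import BigDataPoolResourceInfo

example_pool = BigDataPoolResourceInfo(
    location="eastus2",
    node_count=10,
    node_size="Medium",                 # str or NodeSize enum
    node_size_family="MemoryOptimized",
    spark_version="2.4",
)
# ---------------------------------------------------------------------------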
+ :type value: list[~azure.synapse.artifacts.models.BigDataPoolResourceInfo] """ - _validation = { - 'name': {'required': True}, - } - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[BigDataPoolResourceInfo]'}, } def __init__( self, *, - name: str, - description: Optional[str] = None, + next_link: Optional[str] = None, + value: Optional[List["BigDataPoolResourceInfo"]] = None, **kwargs ): - super(Transformation, self).__init__(**kwargs) - self.name = name - self.description = description + super(BigDataPoolResourceInfoListResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value -class DataFlowSink(Transformation): - """Transformation for data flow sink. +class BinaryDataset(Dataset): + """Binary dataset. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the Binary storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression: The data compression method used for the binary dataset. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__( self, *, - name: str, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - dataset: Optional["DatasetReference"] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + compression: Optional["DatasetCompression"] = None, **kwargs ): - super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) - self.dataset = dataset + super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Binary' # type: str + self.location = location + self.compression = compression -class DataFlowSource(Transformation): - """Transformation for data flow source. +class BinarySink(CopySink): + """A copy activity Binary sink. All required parameters must be populated in order to send to Azure. - :param name: Required. Transformation name. - :type name: str - :param description: Transformation description. - :type description: str - :param dataset: Dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
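# --- Editor's sketch, not part of the generated diff: a BinaryDataset pointing
# at a blob location. Assumptions: AzureBlobStorageLocation is one of the
# DatasetLocation subtypes in this models package, and LinkedServiceReference
# takes its discriminator explicitly.
from azure.synapse.artifacts.models import (
    AzureBlobStorageLocation,
    BinaryDataset,
    LinkedServiceReference,
)

example_binary_ds = BinaryDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyBlobStorage"
    ),
    location=AzureBlobStorageLocation(
        container="archive", folder_path="incoming", file_name="payload.bin"
    ),
)
# ---------------------------------------------------------------------------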
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, *, - name: str, - description: Optional[str] = None, - dataset: Optional["DatasetReference"] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) - self.dataset = dataset + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BinarySink' # type: str + self.store_settings = store_settings -class DataFlowSourceSetting(msrest.serialization.Model): - """Definition of data flow source setting for debug. +class BinarySource(CopySource): + """A copy activity Binary source. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param source_name: The data flow source name. - :type source_name: str - :param row_limit: Defines the row limit of data flow source in debug. - :type row_limit: int + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Binary store settings. 
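# --- Editor's sketch, not part of the generated diff: a BinarySink whose
# store_settings use only base StoreWriteSettings fields. Assumption:
# AzureBlobStorageWriteSettings is one of the StoreWriteSettings subtypes in
# this models package.
from azure.synapse.artifacts.models import AzureBlobStorageWriteSettings, BinarySink

example_binary_sink = BinarySink(
    store_settings=AzureBlobStorageWriteSettings(max_concurrent_connections=4),
)
# ---------------------------------------------------------------------------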
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ + _validation = { + 'type': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'source_name': {'key': 'sourceName', 'type': 'str'}, - 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - source_name: Optional[str] = None, - row_limit: Optional[int] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(DataFlowSourceSetting, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.source_name = source_name - self.row_limit = row_limit - - -class DataFlowStagingInfo(msrest.serialization.Model): - """Staging info for execute data flow activity. - - :param linked_service: Staging linked service reference. - :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param folder_path: Folder path for staging blob. - :type folder_path: str - """ + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BinarySource' # type: str + self.store_settings = store_settings - _attribute_map = { - 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, - 'folder_path': {'key': 'folderPath', 'type': 'str'}, - } - def __init__( - self, - *, - linked_service: Optional["LinkedServiceReference"] = None, - folder_path: Optional[str] = None, - **kwargs - ): - super(DataFlowStagingInfo, self).__init__(**kwargs) - self.linked_service = linked_service - self.folder_path = folder_path +class Trigger(msrest.serialization.Model): + """Azure Synapse nested object which contains information about creating pipeline run. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. -class DataLakeAnalyticsUSQLActivity(ExecutionActivity): - """Data Lake Analytics U-SQL activity. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param description: Activity description. + :param description: Trigger description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script. - Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Required. Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job. - Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. - :type degree_of_parallelism: object - :param priority: Determines which jobs out of all that are queued should be selected to run - first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or - Expression with resultType integer), minimum: 1. - :type priority: object - :param parameters: Parameters for U-SQL job request. - :type parameters: dict[str, object] - :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression - with resultType string). - :type runtime_version: object - :param compilation_mode: Compilation mode of U-SQL. Must be one of these values : Semantic, - Full and SingleBox. Type: string (or Expression with resultType string). - :type compilation_mode: object + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. 
+ :type annotations: list[object] """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'script_path': {'required': True}, - 'script_linked_service': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, - 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, - 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} } def __init__( self, *, - name: str, - script_path: object, - script_linked_service: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - degree_of_parallelism: Optional[object] = None, - priority: Optional[object] = None, - parameters: Optional[Dict[str, object]] = None, - runtime_version: Optional[object] = None, - compilation_mode: Optional[object] = None, + annotations: Optional[List[object]] = None, **kwargs ): - super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'DataLakeAnalyticsU-SQL' - self.script_path = script_path - self.script_linked_service = script_linked_service - self.degree_of_parallelism = degree_of_parallelism - self.priority = priority - self.parameters = parameters - self.runtime_version = runtime_version - self.compilation_mode = compilation_mode + super(Trigger, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'Trigger' # type: str + self.description = description + self.runtime_state = None + self.annotations = annotations -class DatasetCompression(msrest.serialization.Model): - """The compression method used on a dataset. +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. 
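# --- Editor's sketch, not part of the generated diff: the _subtype_map above
# drives polymorphic deserialization, so deserializing through the Trigger
# base class yields the subclass named by the "type" discriminator.
from azure.synapse.artifacts.models import Trigger

raw = {"type": "ScheduleTrigger", "description": "hourly run"}
trigger = Trigger.deserialize(raw)
# type(trigger).__name__ == "ScheduleTrigger"
# ---------------------------------------------------------------------------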
Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, **kwargs ): - super(DatasetCompression, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type: str = 'DatasetCompression' + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'MultiplePipelineTrigger' # type: str + self.pipelines = pipelines -class DatasetBZip2Compression(DatasetCompression): - """The BZip2 compression method used on a dataset. +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. 
Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :type ignore_empty_blobs: bool + :param events: Required. The type of events that cause this trigger to fire. + :type events: list[str or ~azure.synapse.artifacts.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } def __init__( self, *, + events: List[Union[str, "BlobEventTypes"]], + scope: str, additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + blob_path_begins_with: Optional[str] = None, + blob_path_ends_with: Optional[str] = None, + ignore_empty_blobs: Optional[bool] = None, **kwargs ): - super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'BZip2' + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'BlobEventsTrigger' # type: str + self.blob_path_begins_with = blob_path_begins_with + self.blob_path_ends_with = blob_path_ends_with + self.ignore_empty_blobs = ignore_empty_blobs + self.events = events + self.scope = scope -class DatasetDebugResource(SubResourceDebugResource): - """Dataset debug resource. +class BlobSink(CopySink): + """A copy activity Azure Blob sink. All required parameters must be populated in order to send to Azure. - :param name: The resource name. 
- :type name: str - :param properties: Required. Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression + with resultType boolean). + :type blob_writer_overwrite_files: object + :param blob_writer_date_time_format: Blob writer date time format. Type: string (or Expression + with resultType string). + :type blob_writer_date_time_format: object + :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with + resultType boolean). + :type blob_writer_add_header: object + :param copy_behavior: The type of copy behavior for copy sink. 
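+ For example, a minimal sketch (assuming Data-Factory-style copy-behavior values such as + 'PreserveHierarchy'): BlobSink(copy_behavior='PreserveHierarchy', blob_writer_add_header=True).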
+ :type copy_behavior: object """ _validation = { - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, + 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, + 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__( self, *, - properties: "Dataset", - name: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + blob_writer_overwrite_files: Optional[object] = None, + blob_writer_date_time_format: Optional[object] = None, + blob_writer_add_header: Optional[object] = None, + copy_behavior: Optional[object] = None, **kwargs ): - super(DatasetDebugResource, self).__init__(name=name, **kwargs) - self.properties = properties + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BlobSink' # type: str + self.blob_writer_overwrite_files = blob_writer_overwrite_files + self.blob_writer_date_time_format = blob_writer_date_time_format + self.blob_writer_add_header = blob_writer_add_header + self.copy_behavior = copy_behavior -class DatasetDeflateCompression(DatasetCompression): - """The Deflate compression method used on a dataset. +class BlobSource(CopySource): + """A copy activity Azure Blob source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
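+ A minimal sketch of the optional knobs documented here: BlobSource(recursive=True, + skip_header_line_count=1) reads a folder recursively and skips one header line per blob.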
+ :type max_concurrent_connections: object + :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType + boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each blob. Type: integer (or + Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object """ _validation = { @@ -6885,129 +7419,219 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + treat_empty_as_null: Optional[object] = None, + skip_header_line_count: Optional[object] = None, + recursive: Optional[object] = None, **kwargs ): - super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'Deflate' - self.level = level - - -class DatasetFolder(msrest.serialization.Model): - """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - - :param name: The name of the folder that this Dataset is in. - :type name: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - } + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'BlobSource' # type: str + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive - def __init__( - self, - *, - name: Optional[str] = None, - **kwargs - ): - super(DatasetFolder, self).__init__(**kwargs) - self.name = name +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. -class DatasetGZipCompression(DatasetCompression): - """The GZip compression method used on a dataset. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". 
- :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, } def __init__( self, *, + folder_path: str, + max_concurrency: int, + linked_service: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, - **kwargs - ): - super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'GZip' - self.level = level + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + **kwargs + ): + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'BlobTrigger' # type: str + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service -class DatasetListResponse(msrest.serialization.Model): - """A list of dataset resources. +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. All required parameters must be populated in order to send to Azure. - :param value: Required. List of datasets. - :type value: list[~azure.synapse.artifacts.models.DatasetResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. 
+ :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name for connection. Type: string (or Expression with resultType + string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression with resultType + integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression with resultType + string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[DatasetResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - value: List["DatasetResource"], - next_link: Optional[str] = None, + host: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + port: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DatasetListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - + super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Cassandra' # type: str + self.host = host + self.authentication_type = authentication_type + self.port = port + self.username = username + self.password = password + 
self.encrypted_credential = encrypted_credential -class DatasetLocation(msrest.serialization.Model): - """Dataset location. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: . +class CassandraSource(TabularSource): + """A copy activity source for a Cassandra database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language + (CQL) command. Type: string (or Expression with resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many Cassandra servers must + respond to a read request before returning data to the client application. Cassandra checks the + specified number of Cassandra servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + Possible values include: "ALL", "EACH_QUORUM", "QUORUM", "LOCAL_QUORUM", "ONE", "TWO", "THREE", + "LOCAL_ONE", "SERIAL", "LOCAL_SERIAL". 
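+ A minimal sketch using values from the list above: CassandraSource(query='SELECT * FROM ks.tbl', + consistency_level='QUORUM') requests quorum reads.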
+ :type consistency_level: str or + ~azure.synapse.artifacts.models.CassandraSourceReadConsistencyLevels """ _validation = { @@ -7017,154 +7641,287 @@ class DatasetLocation(msrest.serialization.Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - _subtype_map = { - 'type': {} + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type: str = 'DatasetLocation' - self.folder_path = folder_path - self.file_name = file_name - + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'CassandraSource' # type: str + self.query = query + self.consistency_level = consistency_level -class DatasetReference(msrest.serialization.Model): - """Dataset reference type. - Variables are only populated by the server, and will be ignored when sending a request. +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Dataset reference type. Default value: "DatasetReference". - :vartype type: str - :param reference_name: Required. Reference dataset name. - :type reference_name: str - :param parameters: Arguments for dataset. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name of the Cassandra database. Type: string (or Expression with + resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with + resultType string). + :type keyspace: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } - type = "DatasetReference" - def __init__( self, *, - reference_name: str, - parameters: Optional[Dict[str, object]] = None, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + keyspace: Optional[object] = None, **kwargs ): - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters + super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CassandraTable' # type: str + self.table_name = table_name + self.keyspace = keyspace -class DatasetResource(SubResource): - """Dataset resource type. +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
- :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[~azure.synapse.artifacts.models.PipelineReference] + :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. + :type run_dimension: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, } def __init__( self, *, - properties: "Dataset", + pipeline: "TriggerPipelineReference", + depends_on: List["PipelineReference"], + run_dimension: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, **kwargs ): - super(DatasetResource, self).__init__(**kwargs) - self.properties = properties + super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'ChainingTrigger' # type: str + self.pipeline = pipeline + self.depends_on = depends_on + self.run_dimension = run_dimension -class DatasetZipDeflateCompression(DatasetCompression): - """The ZipDeflate compression method used on a dataset. +class CloudError(msrest.serialization.Model): + """The object that defines the structure of an Azure Synapse error response. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of dataset compression.Constant filled by server. + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + :param target: Property name/path in request associated with error. + :type target: str + :param details: Array with additional error details. + :type details: list[~azure.synapse.artifacts.models.CloudError] + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudError]'}, + } + + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + details: Optional[List["CloudError"]] = None, + **kwargs + ): + super(CloudError, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). 
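+ A minimal sketch, assuming LinkedServiceReference accepts a reference_name and using placeholder names: + CommonDataServiceForAppsEntityDataset(linked_service_name=LinkedServiceReference(reference_name='MyCds'), + entity_name='account').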
+ :type entity_name: object """ _validation = { 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + entity_name: Optional[object] = None, **kwargs ): - super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) - self.type: str = 'ZipDeflate' - self.level = level - + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CommonDataServiceForAppsEntity' # type: str + self.entity_name = entity_name -class Db2LinkedService(LinkedService): - """Linked service for DB2 data source. - Variables are only populated by the server, and will be ignored when sending a request. +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. All required parameters must be populated in order to send to Azure. @@ -7181,26 +7938,54 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Server name for connection. Type: string (or Expression with + :param deployment_type: Required. The deployment type of the Common Data Service for Apps + instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common + Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType + string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for Apps server. The + property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type server: object - :param database: Required. Database name for connection. Type: string (or Expression with + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. The property is + required for on-prem and not allowed for online. Default is 443. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property + is required for online and not allowed for on-prem. Type: string (or Expression with resultType string). - :type database: object - :ivar authentication_type: AuthenticationType to be used for connection. Default value: - "Basic". - :vartype authentication_type: str - :param username: Username for authentication. Type: string (or Expression with resultType - string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service for Apps instance. + The property is required for on-prem and required for online when more than one Common Data + Service for Apps instance is associated with the user. Type: string (or Expression + with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect to Common Data Service + for Apps server. 'Office365' for the online scenario, 'Ifd' for the on-premises with Ifd + scenario. 'AADServicePrincipal' for Server-To-Server authentication in the online scenario. + Type: string (or Expression with resultType string). Possible values include: "Office365", + "Ifd", "AADServicePrincipal". + :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType + :param username: User name to access the Common Data Service for Apps instance. Type: string + (or Expression with resultType string). :type username: object - :param password: Password for authentication. + :param password: Password to access the Common Data Service for Apps instance. :type password: ~azure.synapse.artifacts.models.SecretBase - :param package_collection: Under where packages are created when querying database. Type: - string (or Expression with resultType string). - :type package_collection: object - :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or - Expression with resultType string). - :type certificate_common_name: object + :param service_principal_id: The client ID of the application in Azure Active Directory used + for Server-To-Server authentication. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). Possible values include: + "ServicePrincipalKey", "ServicePrincipalCert". + :type service_principal_credential_type: str or + ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
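A minimal construction sketch for the new model ('Online' and 'Office365' come from the enums documented above; SecureString is assumed to be the SecretBase subtype these models expose, and all values are placeholders):

    from azure.synapse.artifacts.models import CommonDataServiceForAppsLinkedService, SecureString

    # Online deployment with Office365 authentication; host_name and port apply to
    # on-premises deployments only, and organization_name is needed online only when
    # more than one instance is associated with the user.
    cds_linked_service = CommonDataServiceForAppsLinkedService(
        deployment_type="Online",
        authentication_type="Office365",
        service_uri="https://contoso.crm.dynamics.com",  # placeholder URL
        username="user@contoso.com",                     # placeholder user
        password=SecureString(value="<secret>"),         # placeholder secret
    )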
@@ -7209,9 +7994,8 @@ class Db2LinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, - 'authentication_type': {'constant': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -7221,241 +8005,279 @@ class Db2LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'}, - 'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - authentication_type = "Basic" - def __init__( self, *, - server: object, - database: object, + deployment_type: Union[str, "DynamicsDeploymentType"], + authentication_type: Union[str, "DynamicsAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + host_name: Optional[object] = None, + port: Optional[object] = None, + service_uri: Optional[object] = None, + organization_name: Optional[object] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, - package_collection: Optional[object] = None, - certificate_common_name: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Db2' - self.server = server - self.database = database + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CommonDataServiceForApps' # type: str + self.deployment_type = deployment_type + 
self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type self.username = username self.password = password - self.package_collection = package_collection - self.certificate_common_name = certificate_common_name + self.service_principal_id = service_principal_id + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential self.encrypted_credential = encrypted_credential -class Db2TableDataset(Dataset): - """The Db2 table dataset. +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with - resultType string). - :type schema_type_properties_schema: object - :param table: The Db2 table name. Type: string (or Expression with resultType string). - :type table: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". 
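+ A minimal sketch using the single documented value: CommonDataServiceForAppsSink(write_behavior='Upsert', + ignore_null_values=True).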
+ :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :type alternate_key_name: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'write_behavior': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + ignore_null_values: Optional[object] = None, + alternate_key_name: Optional[object] = None, **kwargs ): - super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Db2Table' - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, 
sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CommonDataServiceForAppsSink' # type: str + self.write_behavior = write_behavior + self.ignore_null_values = ignore_null_values + self.alternate_key_name = alternate_key_name -class DeleteActivity(ExecutionActivity): - """Delete activity. +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param recursive: If true, files or sub-folders under current folder path will be deleted - recursively. Default is false. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param max_concurrent_connections: The max concurrent connections to connect data source at the - same time. - :type max_concurrent_connections: int - :param enable_logging: Whether to record detailed logs of delete-activity execution. Default - value is false. Type: boolean (or Expression with resultType boolean). - :type enable_logging: object - :param log_storage_settings: Log storage settings customer need to provide when enableLogging - is true. - :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings - :param dataset: Required. Delete activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data + Service for Apps (online & on-premises). Type: string (or Expression with resultType string). 
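+ A minimal sketch with an assumed FetchXML snippet: CommonDataServiceForAppsSource( + query='<fetch top="10"><entity name="account" /></fetch>').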
+ :type query: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'max_concurrent_connections': {'minimum': 1}, - 'dataset': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, - 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, - 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, - 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - name: str, - dataset: "DatasetReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - recursive: Optional[object] = None, - max_concurrent_connections: Optional[int] = None, - enable_logging: Optional[object] = None, - log_storage_settings: Optional["LogStorageSettings"] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Delete' - self.recursive = recursive - self.max_concurrent_connections = max_concurrent_connections - self.enable_logging = enable_logging - self.log_storage_settings = log_storage_settings - self.dataset = dataset + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CommonDataServiceForAppsSource' # type: str + self.query = query -class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): - """Request body structure for deleting data flow debug session. +class ConcurLinkedService(LinkedService): + """Concur Service linked service. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. - :type data_flow_name: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_id: Required. Application client_id supplied by Concur App Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur Service. + :type username: object + :param password: The password corresponding to the user name that you provided in the username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, - 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - session_id: Optional[str] = None, - data_flow_name: Optional[str] = None, + client_id: object, + username: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) - self.session_id = session_id - self.data_flow_name = data_flow_name + super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Concur' # type: str + self.client_id = client_id + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential -class DelimitedTextDataset(Dataset): - """Delimited text dataset. +class ConcurObjectDataset(Dataset): + """Concur Service dataset. All required parameters must be populated in order to send to Azure. @@ -7481,35 +8303,8 @@ class DelimitedTextDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the delimited text storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param column_delimiter: The column delimiter. Type: string (or Expression with resultType - string). - :type column_delimiter: object - :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). - :type row_delimiter: object - :param encoding_name: The code page name of the preferred encoding. 
If miss, the default value - is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in - the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", - "snappy", "lz4". - :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel - :param quote_char: The quote character. Type: string (or Expression with resultType string). - :type quote_char: object - :param escape_char: The escape character. Type: string (or Expression with resultType string). - :type escape_char: object - :param first_row_as_header: When used as input, treat the first row of data as headers. When - used as output,write the headers into the output as the first row of data. The default value is - false. Type: boolean (or Expression with resultType boolean). - :type first_row_as_header: object - :param null_value: The null value string. Type: string (or Expression with resultType string). - :type null_value: object + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object """ _validation = { @@ -7527,16 +8322,7 @@ class DelimitedTextDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, - 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, - 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, - 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, - 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, - 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( @@ -7550,249 +8336,290 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - column_delimiter: Optional[object] = None, - row_delimiter: Optional[object] = None, - encoding_name: Optional[object] = None, - compression_codec: Optional[Union[str, "DelimitedTextCompressionCodec"]] = None, - compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, - quote_char: Optional[object] = None, - escape_char: Optional[object] = None, - first_row_as_header: Optional[object] = None, - null_value: Optional[object] = None, + table_name: Optional[object] = None, **kwargs ): - super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, 
schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DelimitedText' - self.location = location - self.column_delimiter = column_delimiter - self.row_delimiter = row_delimiter - self.encoding_name = encoding_name - self.compression_codec = compression_codec - self.compression_level = compression_level - self.quote_char = quote_char - self.escape_char = escape_char - self.first_row_as_header = first_row_as_header - self.null_value = null_value + super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ConcurObject' # type: str + self.table_name = table_name -class DocumentDbCollectionDataset(Dataset): - """Microsoft Azure Document Database Collection dataset. +class ConcurSource(TabularSource): + """A copy activity Concur Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. Document Database collection name. Type: string (or - Expression with resultType string). - :type collection_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
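# A sketch showing how the ConcurLinkedService and ConcurObjectDataset models
# completed above fit together. SecureString and LinkedServiceReference are not
# part of this diff, so the keyword arguments used for them are assumptions
# based on the conventions of this model set; all other arguments mirror the
# __init__ signatures shown above.
from azure.synapse.artifacts.models import (
    ConcurLinkedService,
    ConcurObjectDataset,
    LinkedServiceReference,
    SecureString,
)

concur_ls = ConcurLinkedService(
    client_id="my-concur-app-id",              # required by _validation
    username="jsmith",                          # required by _validation
    password=SecureString(value="<password>"),  # assumed SecretBase subclass
    use_encrypted_endpoints=True,
)
concur_ds = ConcurObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name="ConcurLS"),
    table_name="Opportunities",
)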
+ :type query: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - collection_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DocumentDbCollection' - self.collection_name = collection_name + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ConcurSource' # type: str + self.query = query -class DrillLinkedService(LinkedService): - """Drill server linked service. +class ControlActivity(Activity): + """Base class for all control activities like IfCondition, ForEach, Until. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Activity description. :type description: str - :param parameters: Parameters for linked service.
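# A companion sketch for the ConcurSource model completed above: every property
# is optional, and query/query_timeout are typed as object so they accept either
# literals or Expression objects.
from azure.synapse.artifacts.models import ConcurSource

concur_source = ConcurSource(
    query="SELECT * FROM Opportunities",
    query_timeout="02:00:00",  # timespan pattern from the docstring
)
assert concur_source.type == 'ConcurSource'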
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, } def __init__( self, *, + name: str, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, **kwargs ): - super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Drill' - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' # type: str -class DrillTableDataset(Dataset): - """Drill server dataset. +class CopyActivity(ExecutionActivity): + """Copy activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. 
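# ControlActivity, completed above, is the base for control-flow activities such
# as IfCondition, ForEach, and Until. Note that its __init__ stamps the
# discriminator as 'Container', the wire name the service uses for this base
# type; a quick sketch:
from azure.synapse.artifacts.models import ControlActivity

ctrl = ControlActivity(name="noop")
assert ctrl.type == 'Container'  # base-class discriminator, not 'ControlActivity'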
+ :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Drill. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param outputs: List of outputs for the activity. + :type outputs: list[~azure.synapse.artifacts.models.DatasetReference] + :param source: Required. Copy activity source. + :type source: ~azure.synapse.artifacts.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.synapse.artifacts.models.CopySink + :param translator: Copy activity translator. If not specified, tabular translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim staging. Default value is + false. Type: boolean (or Expression with resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when EnableStaging is true. + :type staging_settings: ~azure.synapse.artifacts.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to + avoid overloading the data store. Type: integer (or Expression with resultType integer), + minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units that can be used to + perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. Default value is false. + Type: boolean (or Expression with resultType boolean). 
+ :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row settings when + EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + name: str, + source: "CopySource", + sink: "CopySink", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + inputs: Optional[List["DatasetReference"]] 
= None, + outputs: Optional[List["DatasetReference"]] = None, + translator: Optional[object] = None, + enable_staging: Optional[object] = None, + staging_settings: Optional["StagingSettings"] = None, + parallel_copies: Optional[object] = None, + data_integration_units: Optional[object] = None, + enable_skip_incompatible_row: Optional[object] = None, + redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, + preserve_rules: Optional[List[object]] = None, + preserve: Optional[List[object]] = None, **kwargs ): - super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DrillTable' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema - - -class DynamicsAXLinkedService(LinkedService): - """Dynamics AX linked service. + super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'Copy' # type: str + self.inputs = inputs + self.outputs = outputs + self.source = source + self.sink = sink + self.translator = translator + self.enable_staging = enable_staging + self.staging_settings = staging_settings + self.parallel_copies = parallel_copies + self.data_integration_units = data_integration_units + self.enable_skip_incompatible_row = enable_skip_incompatible_row + self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve + + +class CopyTranslator(msrest.serialization.Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy translator type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'CopyTranslator' # type: str + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. @@ -7809,23 +8636,17 @@ class DynamicsAXLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData - endpoint. - :type url: object - :param service_principal_id: Required. Specify the application's client ID. 
Type: string (or + :param connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. Mark this field as a - SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key - Vault. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which - your application resides. Retrieve it by hovering the mouse in the top-right corner of the - Azure portal. Type: string (or Expression with resultType string). - :type tenant: object - :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: - string (or Expression with resultType string). - :type aad_resource_id: object + :type account_endpoint: object + :param database: The name of the database. Type: string (or Expression with resultType string). + :type database: object + :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or + AzureKeyVaultSecretReference. + :type account_key: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -7834,11 +8655,6 @@ class DynamicsAXLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'url': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'tenant': {'required': True}, - 'aad_resource_id': {'required': True}, } _attribute_map = { @@ -7848,42 +8664,39 @@ class DynamicsAXLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - url: object, - service_principal_id: object, - service_principal_key: "SecretBase", - tenant: object, - aad_resource_id: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + account_endpoint: Optional[object] = None, + 
database: Optional[object] = None, + account_key: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'DynamicsAX' - self.url = url - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CosmosDb' # type: str + self.connection_string = connection_string + self.account_endpoint = account_endpoint + self.database = database + self.account_key = account_key self.encrypted_credential = encrypted_credential -class DynamicsAXResourceDataset(Dataset): - """The path of the Dynamics AX OData entity. +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. All required parameters must be populated in order to send to Azure. @@ -7909,15 +8722,15 @@ class DynamicsAXResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression - with resultType string). - :type path: object + :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: + string (or Expression with resultType string). + :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'path': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -7930,14 +8743,14 @@ class DynamicsAXResourceDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - path: object, + collection: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -7947,217 +8760,210 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsAXResource' - self.path = path + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CosmosDbMongoDbApiCollection' # type: str + self.collection = collection -class DynamicsCrmEntityDataset(Dataset): - """The Dynamics CRM entity dataset. 
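# A sketch wiring the CopyActivity model completed above to a concrete source
# and sink. ConcurSource appears earlier in this diff and CosmosDbSqlApiSink
# later; neither has required properties beyond its type constant, so the
# example stays small. All argument names come from the signatures in this diff.
from azure.synapse.artifacts.models import (
    ConcurSource,
    CopyActivity,
    CosmosDbSqlApiSink,
)

copy_activity = CopyActivity(
    name="CopyConcurToCosmos",  # required, like source and sink
    source=ConcurSource(query="SELECT * FROM Opportunities"),
    sink=CosmosDbSqlApiSink(write_behavior="upsert"),  # insert or upsert
    enable_staging=False,
    data_integration_units=4,
)
assert copy_activity.type == 'Copy'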
+class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to + access. Type: string (or Expression with resultType string).
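# Stepping back to the CosmosDbLinkedService model completed just above: either
# a full connection string or the accountEndpoint/database/accountKey triple can
# be supplied, since none of its properties are required by _validation. The
# endpoint and key are placeholders, and SecureString is an assumed SecretBase
# subclass from this model set.
from azure.synapse.artifacts.models import CosmosDbLinkedService, SecureString

cosmos_ls = CosmosDbLinkedService(
    account_endpoint="https://myaccount.documents.azure.com:443/",
    database="mydb",
    account_key=SecureString(value="<account-key>"),
)
assert cosmos_ls.type == 'CosmosDb'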
+ :type database: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + connection_string: object, + database: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, **kwargs ): - super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsCrmEntity' - self.entity_name = entity_name + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CosmosDbMongoDbApi' # type: str + self.connection_string = connection_string + self.database = database -class DynamicsCrmLinkedService(LinkedService): - """Dynamics CRM linked service. +class CosmosDbMongoDbApiSink(CopySink): + """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' - for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. 
Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics CRM server. The property is - required for on-prem and not allowed for online. Type: string (or Expression with resultType - string). - :type host_name: object - :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem - and not allowed for online. Default is 443. Type: integer (or Expression with resultType + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for - on-line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: object - :param organization_name: The organization name of the Dynamics CRM instance. The property is - required for on-prem and required for online when there are more than one Dynamics CRM - instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: object - :param authentication_type: Required. The authentication type to connect to Dynamics CRM - server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, - 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics CRM instance. Type: string (or Expression - with resultType string). - :type username: object - :param password: Password to access the Dynamics CRM instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
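# A sketch combining the CosmosDbMongoDbApiLinkedService and
# CosmosDbMongoDbApiCollectionDataset models completed above; connection_string,
# database, and collection are all required by _validation. The
# LinkedServiceReference keyword is an assumption, as that class is not shown in
# this diff.
from azure.synapse.artifacts.models import (
    CosmosDbMongoDbApiCollectionDataset,
    CosmosDbMongoDbApiLinkedService,
    LinkedServiceReference,
)

mongo_ls = CosmosDbMongoDbApiLinkedService(
    connection_string="mongodb://myaccount:<key>@myaccount.documents.azure.com:10255/",
    database="mydb",
)
mongo_ds = CosmosDbMongoDbApiCollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name="CosmosMongoLS"),
    collection="orders",
)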
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Specifies whether a document with the same key should be overwritten (upsert) + rather than raise an exception (insert). The default value is "insert". Type: string (or + Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host_name:
Optional[object] = None, - port: Optional[object] = None, - service_uri: Optional[object] = None, - organization_name: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, - service_principal_credential: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[object] = None, **kwargs ): - super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'DynamicsCrm' - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_credential_type = service_principal_credential_type - self.service_principal_credential = service_principal_credential - self.encrypted_credential = encrypted_credential + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbMongoDbApiSink' # type: str + self.write_behavior = write_behavior -class DynamicsEntityDataset(Dataset): - """The Dynamics entity dataset. +class CosmosDbMongoDbApiSource(CopySource): + """A copy activity source for a CosmosDB (MongoDB API) database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB instance. 
In most cases, modifying the batch size will not affect the user or the + application. This property's main purpose is to avoid hitting the response size limit. + Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, + **kwargs + ): + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbMongoDbApiSource' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout + + +class CosmosDbSqlApiCollectionDataset(Dataset): + """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. @@ -8183,14 +8989,15 @@ class DynamicsEntityDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param entity_name: The logical name of the entity. Type: string (or Expression with resultType - string). - :type entity_name: object + :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or + Expression with resultType string).
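# A sketch of the CosmosDbMongoDbApiSource and CosmosDbMongoDbApiSink models
# completed above. filter takes a MongoDB query document serialized as a string
# (or an Expression), and the service treats a missing write_behavior as
# "insert"; the values below are illustrative.
from azure.synapse.artifacts.models import (
    CosmosDbMongoDbApiSink,
    CosmosDbMongoDbApiSource,
)

mongo_source = CosmosDbMongoDbApiSource(
    filter='{"status": "active"}',
    batch_size=100,           # avoids oversized response batches
    query_timeout="00:10:00",
)
mongo_sink = CosmosDbMongoDbApiSink(write_behavior="upsert")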
+ :type collection_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, } _attribute_map = { @@ -8203,13 +9010,14 @@ class DynamicsEntityDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + collection_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -8217,148 +9025,140 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - entity_name: Optional[object] = None, **kwargs ): - super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'DynamicsEntity' - self.entity_name = entity_name + super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CosmosDbSqlApiCollection' # type: str + self.collection_name = collection_name -class DynamicsLinkedService(LinkedService): - """Dynamics linked service. +class CosmosDbSqlApiSink(CopySink): + """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for - Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType - :param host_name: The host name of the on-premises Dynamics server. The property is required - for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: str - :param port: The port of on-premises Dynamics server. The property is required for on-prem and - not allowed for online. Default is 443. 
Type: integer (or Expression with resultType integer), - minimum: 0. - :type port: str - :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- - line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: str - :param organization_name: The organization name of the Dynamics instance. The property is - required for on-prem and required for online when there are more than one Dynamics instances - associated with the user. Type: string (or Expression with resultType string). - :type organization_name: str - :param authentication_type: Required. The authentication type to connect to Dynamics server. - 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' - for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType - :param username: User name to access the Dynamics instance. Type: string (or Expression with - resultType string). - :type username: object - :param password: Password to access the Dynamics instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The client ID of the application in Azure Active Directory used - for Server-To-Server authentication. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType - :param service_principal_credential: The credential of the service principal object in Azure - Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', - servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If - servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only - be AzureKeyVaultSecretReference. - :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { 'type': {'required': True}, - 'deployment_type': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, - 'port': {'key': 'typeProperties.port', 'type': 'str'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - host_name: Optional[str] = None, - port: Optional[str] = None, - service_uri: Optional[str] = None, - organization_name: Optional[str] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, - service_principal_credential: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[object] = None, **kwargs ): - super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, 
description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Dynamics' - self.deployment_type = deployment_type - self.host_name = host_name - self.port = port - self.service_uri = service_uri - self.organization_name = organization_name - self.authentication_type = authentication_type - self.username = username - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_credential_type = service_principal_credential_type - self.service_principal_credential = service_principal_credential - self.encrypted_credential = encrypted_credential + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbSqlApiSink' # type: str + self.write_behavior = write_behavior -class EloquaLinkedService(LinkedService): - """Eloqua server linked service. +class CosmosDbSqlApiSource(CopySource): + """A copy activity Azure CosmosDB (SQL API) Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: SQL API query. Type: string (or Expression with resultType string). + :type query: object + :param page_size: Page size of the result. Type: integer (or Expression with resultType + integer). + :type page_size: object + :param preferred_regions: Preferred regions. Type: array of strings (or Expression with + resultType array of strings). 
+ :type preferred_regions: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'page_size': {'key': 'pageSize', 'type': 'object'}, + 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + page_size: Optional[object] = None, + preferred_regions: Optional[object] = None, + **kwargs + ): + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'CosmosDbSqlApiSource' # type: str + self.query = query + self.page_size = page_size + self.preferred_regions = preferred_regions + + +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. All required parameters must be populated in order to send to Azure. @@ -8375,23 +9175,11 @@ class EloquaLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). - :type endpoint: object - :param username: Required. The site name and user name of your Eloqua account in the form: - sitename/username. (i.e. Eloqua/Alice). - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in connection string. + :type cred_string: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
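# ---------------------------------------------------------------------------
# Editor's note: illustrative usage, not part of the generated diff. A minimal
# sketch of how the CosmosDB (SQL API) models added above fit together,
# assuming they are exported from azure.synapse.artifacts.models and that
# LinkedServiceReference accepts a reference_name keyword (its definition is
# not shown in this hunk). "MyCosmosDbLinkedService" and the collection/query
# values are placeholders.

from azure.synapse.artifacts.models import (
    CosmosDbSqlApiCollectionDataset,
    CosmosDbSqlApiSink,
    CosmosDbSqlApiSource,
    LinkedServiceReference,
)

# Reference an existing Cosmos DB (SQL API) linked service by name.
linked_service = LinkedServiceReference(reference_name="MyCosmosDbLinkedService")

# collection_name is required per the added validation above.
dataset = CosmosDbSqlApiCollectionDataset(
    linked_service_name=linked_service,
    collection_name="orders",
)

# Expression-typed properties (annotated as `object`) also accept plain values.
source = CosmosDbSqlApiSource(
    query="SELECT * FROM c WHERE c.status = 'open'",
    page_size=1000,
    preferred_regions=["West US 2", "East US"],
)

# write_behavior allows "insert" and "upsert" per the docstring.
sink = CosmosDbSqlApiSink(write_behavior="upsert")
# ---------------------------------------------------------------------------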
@@ -8400,8 +9188,6 @@ class EloquaLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'username': {'required': True}, } _attribute_map = { @@ -8411,45 +9197,90 @@ class EloquaLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - endpoint: object, - username: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + connection_string: Optional[object] = None, + cred_string: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Eloqua' - self.endpoint = endpoint - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Couchbase' # type: str + self.connection_string = connection_string + self.cred_string = cred_string self.encrypted_credential = encrypted_credential -class EloquaObjectDataset(Dataset): - """Eloqua server dataset. +class CouchbaseSource(TabularSource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'CouchbaseSource' # type: str + self.query = query + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. All required parameters must be populated in order to send to Azure. @@ -8511,224 +9342,107 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'EloquaObject' + super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CouchbaseTable' # type: str self.table_name = table_name -class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): - """Request body structure for data flow expression preview. +class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): + """Request body structure for creating data flow debug session. - :param session_id: The ID of data flow debug session. - :type session_id: str - :param data_flow_name: The data flow which contains the debug session. + :param data_flow_name: The name of the data flow. :type data_flow_name: str - :param stream_name: The output stream name. - :type stream_name: str - :param row_limits: The row limit for preview request. - :type row_limits: int - :param expression: The expression for preview. - :type expression: str + :param existing_cluster_id: The ID of existing Databricks cluster. + :type existing_cluster_id: str + :param cluster_timeout: Timeout setting for Databricks cluster. 
+ :type cluster_timeout: int + :param new_cluster_name: The name of new Databricks cluster. + :type new_cluster_name: str + :param new_cluster_node_type: The type of new Databricks cluster. + :type new_cluster_node_type: str + :param data_bricks_linked_service: Data bricks linked service. + :type data_bricks_linked_service: ~azure.synapse.artifacts.models.LinkedServiceResource """ _attribute_map = { - 'session_id': {'key': 'sessionId', 'type': 'str'}, 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, - 'stream_name': {'key': 'streamName', 'type': 'str'}, - 'row_limits': {'key': 'rowLimits', 'type': 'int'}, - 'expression': {'key': 'expression', 'type': 'str'}, + 'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'}, + 'cluster_timeout': {'key': 'clusterTimeout', 'type': 'int'}, + 'new_cluster_name': {'key': 'newClusterName', 'type': 'str'}, + 'new_cluster_node_type': {'key': 'newClusterNodeType', 'type': 'str'}, + 'data_bricks_linked_service': {'key': 'dataBricksLinkedService', 'type': 'LinkedServiceResource'}, } def __init__( self, *, - session_id: Optional[str] = None, data_flow_name: Optional[str] = None, - stream_name: Optional[str] = None, - row_limits: Optional[int] = None, - expression: Optional[str] = None, + existing_cluster_id: Optional[str] = None, + cluster_timeout: Optional[int] = None, + new_cluster_name: Optional[str] = None, + new_cluster_node_type: Optional[str] = None, + data_bricks_linked_service: Optional["LinkedServiceResource"] = None, **kwargs ): - super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) - self.session_id = session_id + super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) self.data_flow_name = data_flow_name - self.stream_name = stream_name - self.row_limits = row_limits - self.expression = expression - + self.existing_cluster_id = existing_cluster_id + self.cluster_timeout = cluster_timeout + self.new_cluster_name = new_cluster_name + self.new_cluster_node_type = new_cluster_node_type + self.data_bricks_linked_service = data_bricks_linked_service -class ExecuteDataFlowActivity(ExecutionActivity): - """Execute data flow activity. - All required parameters must be populated in order to send to Azure. +class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): + """Response body structure for creating data flow debug session. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference - :param staging: Staging info for execute data flow activity. - :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo - :param integration_runtime: The integration runtime reference. 
- :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param compute: Compute properties for data flow activity. - :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + :param session_id: The ID of data flow debug session. + :type session_id: str """ - _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'data_flow': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, - 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, - 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, - 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, - } - - def __init__( - self, - *, - name: str, - data_flow: "DataFlowReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - staging: Optional["DataFlowStagingInfo"] = None, - integration_runtime: Optional["IntegrationRuntimeReference"] = None, - compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, - **kwargs - ): - super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'ExecuteDataFlow' - self.data_flow = data_flow - self.staging = staging - self.integration_runtime = integration_runtime - self.compute = compute - - -class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): - """Compute properties for data flow activity. - - :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType - :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. 
- :type core_count: int - """ - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, } def __init__( self, *, - compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, - core_count: Optional[int] = None, + session_id: Optional[str] = None, **kwargs ): - super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) - self.compute_type = compute_type - self.core_count = core_count + super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) + self.session_id = session_id -class ExecutePipelineActivity(Activity): - """Execute pipeline activity. +class CreateRunResponse(msrest.serialization.Model): + """Response body with a run identifier. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param pipeline: Required. Pipeline reference. - :type pipeline: ~azure.synapse.artifacts.models.PipelineReference - :param parameters: Pipeline parameters. - :type parameters: dict[str, object] - :param wait_on_completion: Defines whether activity execution will wait for the dependent - pipeline execution to finish. Default is false. - :type wait_on_completion: bool + :param run_id: Required. Identifier of a run. 
+ :type run_id: str """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'pipeline': {'required': True}, + 'run_id': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, - 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__( self, *, - name: str, - pipeline: "PipelineReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - parameters: Optional[Dict[str, object]] = None, - wait_on_completion: Optional[bool] = None, + run_id: str, **kwargs ): - super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'ExecutePipeline' - self.pipeline = pipeline - self.parameters = parameters - self.wait_on_completion = wait_on_completion + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = run_id -class ExecuteSSISPackageActivity(ExecutionActivity): - """Execute SSIS package activity. +class CustomActivity(ExecutionActivity): + """Custom activity type. All required parameters must be populated in order to send to Azure. @@ -8749,42 +9463,29 @@ class ExecuteSSISPackageActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param package_location: Required. SSIS package location. - :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation - :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or - "x64". Type: string (or Expression with resultType string). - :type runtime: object - :param logging_level: The logging level of SSIS package execution. Type: string (or Expression - with resultType string). - :type logging_level: object - :param environment_path: The environment path to execute the SSIS package. Type: string (or - Expression with resultType string). - :type environment_path: object - :param execution_credential: The package execution credential. - :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential - :param connect_via: Required. The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param project_parameters: The project level parameters to execute the SSIS package. - :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param package_parameters: The package level parameters to execute the SSIS package. - :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] - :param project_connection_managers: The project level connection managers to execute the SSIS - package. 
- :type project_connection_managers: dict[str, object] - :param package_connection_managers: The package level connection managers to execute the SSIS - package. - :type package_connection_managers: dict[str, object] - :param property_overrides: The property overrides to execute the SSIS package. - :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] - :param log_location: SSIS package execution log location. - :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + :param command: Required. Command for custom activity Type: string (or Expression with + resultType string). + :type command: object + :param resource_linked_service: Resource linked service reference. + :type resource_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for resource files Type: string (or Expression with resultType + string). + :type folder_path: object + :param reference_objects: Reference objects. + :type reference_objects: ~azure.synapse.artifacts.models.CustomActivityReferenceObject + :param extended_properties: User defined property bag. There is no restriction on the keys or + values that can be used. The user specified custom activity has the full responsibility to + consume and interpret the content defined. + :type extended_properties: dict[str, object] + :param retention_time_in_days: The retention time for the files submitted for custom activity. + Type: double (or Expression with resultType double). + :type retention_time_in_days: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'package_location': {'required': True}, - 'connect_via': {'required': True}, + 'command': {'required': True}, } _attribute_map = { @@ -8796,153 +9497,138 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, - 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, - 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, - 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, - 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, - 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, - 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, - 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, - 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, - 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, - 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, - 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 
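+ # Editor's note: the dotted 'typeProperties.*' keys above and below are msrest flattening paths; on (de)serialization these attributes are nested under the activity's typeProperties object rather than emitted at the top level.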
'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, } def __init__( self, *, name: str, - package_location: "SSISPackageLocation", - connect_via: "IntegrationRuntimeReference", + command: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - runtime: Optional[object] = None, - logging_level: Optional[object] = None, - environment_path: Optional[object] = None, - execution_credential: Optional["SSISExecutionCredential"] = None, - project_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, - package_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, - project_connection_managers: Optional[Dict[str, object]] = None, - package_connection_managers: Optional[Dict[str, object]] = None, - property_overrides: Optional[Dict[str, "SSISPropertyOverride"]] = None, - log_location: Optional["SSISLogLocation"] = None, + resource_linked_service: Optional["LinkedServiceReference"] = None, + folder_path: Optional[object] = None, + reference_objects: Optional["CustomActivityReferenceObject"] = None, + extended_properties: Optional[Dict[str, object]] = None, + retention_time_in_days: Optional[object] = None, **kwargs ): - super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'ExecuteSSISPackage' - self.package_location = package_location - self.runtime = runtime - self.logging_level = logging_level - self.environment_path = environment_path - self.execution_credential = execution_credential - self.connect_via = connect_via - self.project_parameters = project_parameters - self.package_parameters = package_parameters - self.project_connection_managers = project_connection_managers - self.package_connection_managers = package_connection_managers - self.property_overrides = property_overrides - self.log_location = log_location + super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'Custom' # type: str + self.command = command + self.resource_linked_service = resource_linked_service + self.folder_path = folder_path + self.reference_objects = reference_objects + self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days -class ExposureControlRequest(msrest.serialization.Model): - """The exposure control request. +class CustomActivityReferenceObject(msrest.serialization.Model): + """Reference objects for custom activity. - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param linked_services: Linked service references. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param datasets: Dataset references. 
+ :type datasets: list[~azure.synapse.artifacts.models.DatasetReference] """ _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'feature_type': {'key': 'featureType', 'type': 'str'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, } def __init__( self, *, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, + linked_services: Optional[List["LinkedServiceReference"]] = None, + datasets: Optional[List["DatasetReference"]] = None, **kwargs ): - super(ExposureControlRequest, self).__init__(**kwargs) - self.feature_name = feature_name - self.feature_type = feature_type + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = linked_services + self.datasets = datasets -class ExposureControlResponse(msrest.serialization.Model): - """The exposure control response. +class CustomDataset(Dataset): + """The custom dataset. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar feature_name: The feature name. - :vartype feature_name: str - :ivar value: The feature value. - :vartype value: str - """ - - _validation = { - 'feature_name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'feature_name': {'key': 'featureName', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ExposureControlResponse, self).__init__(**kwargs) - self.feature_name = None - self.value = None - - -class Expression(msrest.serialization.Model): - """Azure Synapse expression definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Expression type. Default value: "Expression". - :vartype type: str - :param value: Required. Expression value. - :type value: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param type_properties: Custom dataset properties. 
+ :type type_properties: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'value': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } - type = "Expression" - def __init__( self, *, - value: str, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + type_properties: Optional[object] = None, **kwargs ): - super(Expression, self).__init__(**kwargs) - self.value = value + super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'CustomDataset' # type: str + self.type_properties = type_properties -class FileServerLinkedService(LinkedService): - """File system linked service. +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. All required parameters must be populated in order to send to Azure. @@ -8959,23 +9645,13 @@ class FileServerLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param type_properties: Required. Custom linked service properties. 
+ :type type_properties: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'type_properties': {'required': True}, } _attribute_map = { @@ -8985,36 +9661,59 @@ class FileServerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__( self, *, - host: object, + type_properties: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - user_id: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, **kwargs ): - super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'FileServer' - self.host = host - self.user_id = user_id - self.password = password - self.encrypted_credential = encrypted_credential + super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'CustomDataSource' # type: str + self.type_properties = type_properties -class FilterActivity(Activity): - """Filter and return results from input array based on the conditions. +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: . + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {} + } + + def __init__( + self, + **kwargs + ): + super(CustomSetupBase, self).__init__(**kwargs) + self.type = None # type: Optional[str] + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. All required parameters must be populated in order to send to Azure. @@ -9031,17 +9730,25 @@ class FilterActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param items: Required. Input array on which filter should be applied. - :type items: ~azure.synapse.artifacts.models.Expression - :param condition: Required. Condition to be used for filtering the input. - :type condition: ~azure.synapse.artifacts.models.Expression + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param notebook_path: Required. The absolute path of the notebook to be run in the Databricks + Workspace. This path must begin with a slash. Type: string (or Expression with resultType + string). + :type notebook_path: object + :param base_parameters: Base parameters to be used for each run of this job. If the notebook + takes a parameter that is not specified, the default value from the notebook will be used. + :type base_parameters: dict[str, object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'items': {'required': True}, - 'condition': {'required': True}, + 'notebook_path': {'required': True}, } _attribute_map = { @@ -9051,30 +9758,37 @@ 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, + 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, *, name: str, - items: "Expression", - condition: "Expression", + notebook_path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + base_parameters: Optional[Dict[str, object]] = None, + libraries: Optional[List[Dict[str, object]]] = None, **kwargs ): - super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Filter' - self.items = items - self.condition = condition + super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'DatabricksNotebook' # type: str + self.notebook_path = notebook_path + self.base_parameters = base_parameters + self.libraries = libraries -class ForEachActivity(Activity): - """This activity is used for iterating over a collection and execute given activities. +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. All required parameters must be populated in order to send to Azure. @@ -9091,23 +9805,24 @@ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param is_sequential: Should the loop be executed in sequence or in parallel (max 50).
- :type is_sequential: bool - :param batch_count: Batch count to be used for controlling the number of parallel execution - (when isSequential is set to false). - :type batch_count: int - :param items: Required. Collection to iterate. - :type items: ~azure.synapse.artifacts.models.Expression - :param activities: Required. List of activities to execute . - :type activities: list[~azure.synapse.artifacts.models.Activity] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing the main method to be + executed. This class must be contained in a JAR provided as a library. Type: string (or + Expression with resultType string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'batch_count': {'maximum': 50}, - 'items': {'required': True}, - 'activities': {'required': True}, + 'main_class_name': {'required': True}, } _attribute_map = { @@ -9117,142 +9832,46 @@ class ForEachActivity(Activity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'}, - 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'}, - 'items': {'key': 'typeProperties.items', 'type': 'Expression'}, - 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, *, name: str, - items: "Expression", - activities: List["Activity"], + main_class_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, - is_sequential: Optional[bool] = None, - batch_count: Optional[int] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + parameters: Optional[List[object]] = None, + libraries: Optional[List[Dict[str, object]]] = None, **kwargs ): - super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'ForEach' - self.is_sequential = is_sequential - self.batch_count = batch_count - self.items = items - self.activities = activities + super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 
'DatabricksSparkJar' # type: str + self.main_class_name = main_class_name + self.parameters = parameters + self.libraries = libraries -class FtpServerLinkedService(LinkedService): - """A FTP server Linked Service. +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the FTP server. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the FTP server uses to listen for client connections. - Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "Anonymous". - :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType - :param user_name: Username to logon the FTP server. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password to logon the FTP server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is - true. Type: boolean (or Expression with resultType boolean). - :type enable_ssl: object - :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate - when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with - resultType boolean). 
- :type enable_server_certificate_validation: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - } - - def __init__( - self, - *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - authentication_type: Optional[Union[str, "FtpAuthenticationType"]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - enable_ssl: Optional[object] = None, - enable_server_certificate_validation: Optional[object] = None, - **kwargs - ): - super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'FtpServer' - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.enable_ssl = enable_ssl - self.enable_server_certificate_validation = enable_server_certificate_validation - - -class GetMetadataActivity(ExecutionActivity): - """Activity to get metadata of dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str :param description: Activity description. :type description: str @@ -9264,16 +9883,19 @@ class GetMetadataActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param dataset: Required. GetMetadata activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference - :param field_list: Fields of metadata to get from dataset. 
- :type field_list: list[object] + :param python_file: Required. The URI of the Python file to be executed. DBFS paths are + supported. Type: string (or Expression with resultType string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that will execute the job. + :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'dataset': {'required': True}, + 'python_file': {'required': True}, } _attribute_map = { @@ -9285,2112 +9907,12350 @@ class GetMetadataActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__( self, *, name: str, - dataset: "DatasetReference", + python_file: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - field_list: Optional[List[object]] = None, + parameters: Optional[List[object]] = None, + libraries: Optional[List[Dict[str, object]]] = None, **kwargs ): - super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'GetMetadata' - self.dataset = dataset - self.field_list = field_list - - -class GetSsisObjectMetadataRequest(msrest.serialization.Model): - """The request payload of get SSIS object metadata. - - :param metadata_path: Metadata path. - :type metadata_path: str - """ - - _attribute_map = { - 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, - } + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'DatabricksSparkPython' # type: str + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries - def __init__( - self, - *, - metadata_path: Optional[str] = None, - **kwargs - ): - super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) - self.metadata_path = metadata_path +class DataFlow(msrest.serialization.Model): + """Azure Synapse nested object which contains a flow with data movements and transformations. -class GoogleAdWordsLinkedService(LinkedService): - """Google AdWords service linked service. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MappingDataFlow. All required parameters must be populated in order to send to Azure. 
- :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of data flow.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: The description of the data flow. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the data flow. :type annotations: list[object] - :param client_customer_id: Required. The Client customer ID of the AdWords account that you - want to fetch report data for. - :type client_customer_id: object - :param developer_token: Required. The developer token associated with the manager account that - you use to grant access to the AdWords API. - :type developer_token: ~azure.synapse.artifacts.models.SecretBase - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this data flow is in. If not specified, Data flow will appear at + the root level. 
+ :type folder: ~azure.synapse.artifacts.models.DataFlowFolder """ _validation = { 'type': {'required': True}, - 'client_customer_id': {'required': True}, - 'developer_token': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, - 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + } + + _subtype_map = { + 'type': {'MappingDataFlow': 'MappingDataFlow'} } def __init__( self, *, - client_customer_id: object, - developer_token: "SecretBase", - authentication_type: Union[str, "GoogleAdWordsAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - refresh_token: Optional["SecretBase"] = None, - client_id: Optional[object] = None, - client_secret: Optional["SecretBase"] = None, - email: Optional[object] = None, - key_file_path: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DataFlowFolder"] = None, **kwargs ): - super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleAdWords' - self.client_customer_id = client_customer_id - self.developer_token = developer_token - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential - + super(DataFlow, self).__init__(**kwargs) + self.type = None # type: Optional[str] + self.description = description + self.annotations = annotations + self.folder = folder -class GoogleAdWordsObjectDataset(Dataset): - """Google AdWords service dataset. 
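# --- Illustrative usage (not part of this changeset) -------------------------
# Minimal sketch of how the polymorphic DataFlow base defined above is
# typically consumed: msrest reads the 'type' discriminator and, per the
# _subtype_map registration, instantiates the MappingDataFlow subclass
# (defined elsewhere in these models). The payload values below are assumed
# examples, not taken from this diff.
from azure.synapse.artifacts.models import DataFlow

payload = {
    "type": "MappingDataFlow",          # discriminator registered in _subtype_map
    "description": "example data flow",
    "annotations": [],
}
flow = DataFlow.deserialize(payload)    # msrest classifies via 'type'
assert type(flow).__name__ == "MappingDataFlow"
# -----------------------------------------------------------------------------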
+ +class DataFlowDebugCommandRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param session_id: Required. The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param command_name: The command name. + :type command_name: str + :param command_payload: Required. The command payload object. 
+ :type command_payload: object """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'session_id': {'required': True}, + 'command_payload': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'command_name': {'key': 'commandName', 'type': 'str'}, + 'command_payload': {'key': 'commandPayload', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + session_id: str, + command_payload: object, + data_flow_name: Optional[str] = None, + command_name: Optional[str] = None, **kwargs ): - super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GoogleAdWordsObject' - self.table_name = table_name - + super(DataFlowDebugCommandRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.command_name = command_name + self.command_payload = command_payload -class GoogleBigQueryLinkedService(LinkedService): - """Google BigQuery service linked service. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugCommandResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param project: Required. The default BigQuery project to query against. - :type project: object - :param additional_projects: A comma-separated list of public BigQuery projects to access. 
- :type additional_projects: object - :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google - Drive access enables support for federated tables that combine BigQuery data with data from - Google Drive. The default value is false. - :type request_google_drive_scope: object - :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for - authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values - include: "ServiceAuthentication", "UserAuthentication". - :type authentication_type: str or - ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType - :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery - for UserAuthentication. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id of the google application used to acquire the refresh token. - Type: string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret of the google application used to acquire the refresh - token. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. - :type email: object - :param key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :type key_file_path: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. 
+ :type data: str """ - _validation = { - 'type': {'required': True}, - 'project': {'required': True}, - 'authentication_type': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'project': {'key': 'typeProperties.project', 'type': 'object'}, - 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, - 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'email': {'key': 'typeProperties.email', 'type': 'object'}, - 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, } def __init__( self, *, - project: object, - authentication_type: Union[str, "GoogleBigQueryAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - additional_projects: Optional[object] = None, - request_google_drive_scope: Optional[object] = None, - refresh_token: Optional["SecretBase"] = None, - client_id: Optional[object] = None, - client_secret: Optional["SecretBase"] = None, - email: Optional[object] = None, - key_file_path: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - encrypted_credential: Optional[object] = None, + status: Optional[str] = None, + data: Optional[str] = None, **kwargs ): - super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'GoogleBigQuery' - self.project = project - self.additional_projects = additional_projects - self.request_google_drive_scope = request_google_drive_scope - self.authentication_type = authentication_type - self.refresh_token = refresh_token - self.client_id = client_id - self.client_secret = client_secret - self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.encrypted_credential = encrypted_credential - + super(DataFlowDebugCommandResponse, self).__init__(**kwargs) + self.status = status + self.data = data -class GoogleBigQueryObjectDataset(Dataset): - """Google BigQuery service dataset. - All required parameters must be populated in order to send to Azure. 
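# --- Illustrative usage (not part of this changeset) -------------------------
# Small sketch round-tripping the debug-command models defined above. The
# session ID, data flow name, and payload shape are placeholder assumptions;
# per _validation, only session_id and command_payload are required on the
# request.
from azure.synapse.artifacts.models import (
    DataFlowDebugCommandRequest,
    DataFlowDebugCommandResponse,
)

request = DataFlowDebugCommandRequest(
    session_id="00000000-0000-0000-0000-000000000000",  # placeholder session ID
    data_flow_name="exampleDataFlow",                   # assumed data flow name
    command_payload={"rowLimits": 100},                 # assumed payload shape
)
wire = request.serialize()      # dict with camelCase keys per _attribute_map
assert wire["sessionId"] == request.session_id

response = DataFlowDebugCommandResponse.deserialize(
    {"status": "Succeeded", "data": ""}                 # assumed wire example
)
print(response.status)
# -----------------------------------------------------------------------------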
+class DataFlowDebugPackage(msrest.serialization.Model): + """Request body structure for starting data flow debug session. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using database + table - properties instead. - :type table_name: object - :param table: The table name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type table: object - :param dataset: The database name of the Google BigQuery. Type: string (or Expression with - resultType string). - :type dataset: object + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~azure.synapse.artifacts.models.DataFlowDebugResource + :param datasets: List of datasets. + :type datasets: list[~azure.synapse.artifacts.models.DatasetDebugResource] + :param linked_services: List of linked services. + :type linked_services: list[~azure.synapse.artifacts.models.LinkedServiceDebugResource] + :param staging: Staging info for debug session. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. 
+ :type debug_settings: ~azure.synapse.artifacts.models.DataFlowDebugPackageDebugSettings """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - dataset: Optional[object] = None, + session_id: Optional[str] = None, + data_flow: Optional["DataFlowDebugResource"] = None, + datasets: Optional[List["DatasetDebugResource"]] = None, + linked_services: Optional[List["LinkedServiceDebugResource"]] = None, + staging: Optional["DataFlowStagingInfo"] = None, + debug_settings: Optional["DataFlowDebugPackageDebugSettings"] = None, **kwargs ): - super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GoogleBigQueryObject' - self.table_name = table_name - self.table = table - self.dataset = dataset - + super(DataFlowDebugPackage, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.session_id = session_id + self.data_flow = data_flow + self.datasets = datasets + self.linked_services = linked_services + self.staging = staging + self.debug_settings = debug_settings -class GoogleCloudStorageLinkedService(LinkedService): - """Linked service for Google Cloud Storage. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. 
- :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access - Management (IAM) user. Type: string (or Expression with resultType string). - :type access_key_id: object - :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access - Management (IAM) user. - :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase - :param service_url: This value specifies the endpoint to access with the Google Cloud Storage - Connector. This is an optional property; change it only if you want to try a different service - endpoint or want to switch between https and http. Type: string (or Expression with resultType - string). - :type service_url: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~azure.synapse.artifacts.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, - 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, - 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - access_key_id: Optional[object] = None, - secret_access_key: Optional["SecretBase"] = None, - service_url: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_settings: Optional[List["DataFlowSourceSetting"]] = None, + parameters: Optional[Dict[str, object]] = None, + dataset_parameters: Optional[object] = None, **kwargs ): - super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) - self.type: str = 'GoogleCloudStorage' - self.access_key_id = access_key_id - self.secret_access_key = secret_access_key - self.service_url = service_url - self.encrypted_credential = encrypted_credential - + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) + self.source_settings = source_settings + self.parameters = parameters + self.dataset_parameters = dataset_parameters -class GreenplumLinkedService(LinkedService): - """Greenplum Database linked service. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugPreviewDataRequest(msrest.serialization.Model): + """Request body structure for data flow preview data. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. 
+ :type row_limits: int """ - _validation = { - 'type': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + session_id: Optional[str] = None, + data_flow_name: Optional[str] = None, + stream_name: Optional[str] = None, + row_limits: Optional[int] = None, **kwargs ): - super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Greenplum' - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - + super(DataFlowDebugPreviewDataRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.stream_name = stream_name + self.row_limits = row_limits -class GreenplumTableDataset(Dataset): - """Greenplum Database dataset. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugQueryResponse(msrest.serialization.Model): + """Response body structure of data flow query for data preview, statistics or expression preview. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of Greenplum. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param run_id: The run ID of data flow debug session. + :type run_id: str """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + run_id: Optional[str] = None, **kwargs ): - super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'GreenplumTable' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema - + super(DataFlowDebugQueryResponse, self).__init__(**kwargs) + self.run_id = run_id -class HBaseLinkedService(LinkedService): - """HBase server linked service. - All required parameters must be populated in order to send to Azure. +class SubResourceDebugResource(msrest.serialization.Model): + """Azure Synapse nested debug resource. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). - :type host: object - :param port: The TCP port that the HBase instance uses to listen for client connections. The - default value is 9090. - :type port: object - :param http_path: The partial URL corresponding to the HBase server. (i.e. - /gateway/sandbox/hbase/version). - :type http_path: object - :param authentication_type: Required. The authentication mechanism to use to connect to the - HBase server. Possible values include: "Anonymous", "Basic". - :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType - :param username: The user name used to connect to the HBase instance. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param name: The resource name. 
+ :type name: str """ - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, *, - host: object, - authentication_type: Union[str, "HBaseAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - http_path: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + name: Optional[str] = None, **kwargs ): - super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HBase' - self.host = host - self.port = port - self.http_path = http_path - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + super(SubResourceDebugResource, self).__init__(**kwargs) + self.name = name -class HBaseObjectDataset(Dataset): - """HBase server dataset. +class DataFlowDebugResource(SubResourceDebugResource): + """Data flow debug resource. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. 
- :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param name: The resource name. + :type name: str + :param properties: Required. Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + properties: "DataFlow", + name: Optional[str] = None, **kwargs ): - super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HBaseObject' - self.table_name = table_name - + super(DataFlowDebugResource, self).__init__(name=name, **kwargs) + self.properties = properties -class HdfsLinkedService(LinkedService): - """Hadoop Distributed File System (HDFS) linked service. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugResultResponse(msrest.serialization.Model): + """Response body structure of data flow result for data preview, statistics or expression preview. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. 
- :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of the HDFS service endpoint, e.g. - http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the HDFS. Possible values - are: Anonymous and Windows. Type: string (or Expression with resultType string). - :type authentication_type: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param user_name: User name for Windows authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Windows authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param status: The run status of data preview, statistics or expression preview. + :type status: str + :param data: The result data of data preview, statistics or expression preview. + :type data: str """ - _validation = { - 'type': {'required': True}, - 'url': {'required': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, } def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - encrypted_credential: Optional[object] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, + status: Optional[str] = None, + data: Optional[str] = None, **kwargs ): - super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hdfs' - self.url = url - self.authentication_type = authentication_type - self.encrypted_credential = 
encrypted_credential - self.user_name = user_name - self.password = password - + super(DataFlowDebugResultResponse, self).__init__(**kwargs) + self.status = status + self.data = data -class HDInsightHiveActivity(ExecutionActivity): - """HDInsight Hive activity type. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugSessionInfo(msrest.serialization.Model): + """Data flow debug session info. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). - :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Hive job request. - :type defines: dict[str, object] - :param variables: User specified arguments under hivevar namespace. - :type variables: list[object] - :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster - is with ESP (Enterprise Security Package). - :type query_timeout: int + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param compute_type: Compute type of the cluster. + :type compute_type: str + :param core_count: Core count of the cluster. + :type core_count: int + :param node_count: Node count of the cluster. (deprecated property). + :type node_count: int + :param integration_runtime_name: Attached integration runtime name of data flow debug session. + :type integration_runtime_name: str + :param session_id: The ID of data flow debug session. + :type session_id: str + :param start_time: Start time of data flow debug session. + :type start_time: str + :param time_to_live_in_minutes: Compute type of the cluster. + :type time_to_live_in_minutes: int + :param last_activity_time: Last activity time of data flow debug session. 
-class HDInsightHiveActivity(ExecutionActivity):
-    """HDInsight Hive activity type.
-    All required parameters must be populated in order to send to Azure.
+class DataFlowDebugSessionInfo(msrest.serialization.Model):
+    """Data flow debug session info.
    :param additional_properties: Unmatched properties from the message are deserialized to this
    collection.
    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param type: Required. Type of activity.Constant filled by server.
-    :type type: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.synapse.artifacts.models.ActivityPolicy
-    :param storage_linked_services: Storage linked service references.
-    :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference]
-    :param arguments: User specified arguments to HDInsightActivity.
-    :type arguments: list[object]
-    :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure".
-    :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption
-    :param script_path: Script path. Type: string (or Expression with resultType string).
-    :type script_path: object
-    :param script_linked_service: Script linked service reference.
-    :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference
-    :param defines: Allows user to specify defines for Hive job request.
-    :type defines: dict[str, object]
-    :param variables: User specified arguments under hivevar namespace.
-    :type variables: list[object]
-    :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster
-    is with ESP (Enterprise Security Package).
-    :type query_timeout: int
+    :param data_flow_name: The name of the data flow.
+    :type data_flow_name: str
+    :param compute_type: Compute type of the cluster.
+    :type compute_type: str
+    :param core_count: Core count of the cluster.
+    :type core_count: int
+    :param node_count: Node count of the cluster (deprecated property).
+    :type node_count: int
+    :param integration_runtime_name: Attached integration runtime name of data flow debug session.
+    :type integration_runtime_name: str
+    :param session_id: The ID of data flow debug session.
+    :type session_id: str
+    :param start_time: Start time of data flow debug session.
+    :type start_time: str
+    :param time_to_live_in_minutes: Time to live setting of the cluster in minutes.
+    :type time_to_live_in_minutes: int
+    :param last_activity_time: Last activity time of data flow debug session.
+    :type last_activity_time: str
    """
-    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-    }
-
    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
-        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
-        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
-        'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
-        'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
-        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
-        'variables': {'key': 'typeProperties.variables', 'type': '[object]'},
-        'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'},
+        'data_flow_name': {'key': 'dataFlowName', 'type': 'str'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'node_count': {'key': 'nodeCount', 'type': 'int'},
+        'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'},
+        'session_id': {'key': 'sessionId', 'type': 'str'},
+        'start_time': {'key': 'startTime', 'type': 'str'},
+        'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'},
+        'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'},
    }
    def __init__(
        self,
        *,
-        name: str,
        additional_properties: Optional[Dict[str, object]] = None,
-        description: Optional[str] = None,
-        depends_on: Optional[List["ActivityDependency"]] = None,
-        user_properties: Optional[List["UserProperty"]] = None,
-        linked_service_name: Optional["LinkedServiceReference"] = None,
-        policy: Optional["ActivityPolicy"] = None,
-        storage_linked_services: Optional[List["LinkedServiceReference"]] = None,
-        arguments: Optional[List[object]] = None,
-        get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None,
-        script_path: Optional[object] = None,
-        script_linked_service: Optional["LinkedServiceReference"] = None,
-        defines: Optional[Dict[str, object]] = None,
-        variables: Optional[List[object]] = None,
-        query_timeout: Optional[int] = None,
+        data_flow_name: Optional[str] = None,
+        compute_type: Optional[str] = None,
+        core_count: Optional[int] = None,
+        node_count: Optional[int] = None,
+        integration_runtime_name: Optional[str] = None,
+        session_id: Optional[str] = None,
+        start_time: Optional[str] = None,
+        time_to_live_in_minutes: Optional[int] = None,
+        last_activity_time: Optional[str] = None,
        **kwargs
    ):
-        super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
-        self.type: str = 'HDInsightHive'
-        self.storage_linked_services = storage_linked_services
-        self.arguments = arguments
-        self.get_debug_info = get_debug_info
-        self.script_path = script_path
-        self.script_linked_service = script_linked_service
-        self.defines = defines
-        self.variables = 
variables - self.query_timeout = query_timeout - + super(DataFlowDebugSessionInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.data_flow_name = data_flow_name + self.compute_type = compute_type + self.core_count = core_count + self.node_count = node_count + self.integration_runtime_name = integration_runtime_name + self.session_id = session_id + self.start_time = start_time + self.time_to_live_in_minutes = time_to_live_in_minutes + self.last_activity_time = last_activity_time -class HDInsightLinkedService(LinkedService): - """HDInsight linked service. - All required parameters must be populated in order to send to Azure. +class DataFlowDebugStatisticsRequest(msrest.serialization.Model): + """Request body structure for data flow statistics. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with - resultType string). - :type cluster_uri: object - :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: HDInsight cluster password. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param linked_service_name: The Azure Storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to - the HCatalog database. - :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security - Package). Type: Boolean. - :type is_esp_enabled: object - :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. - Type: string (or Expression with resultType string). - :type file_system: object + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param columns: List of column names. 
+ :type columns: list[str] """ - _validation = { - 'type': {'required': True}, - 'cluster_uri': {'required': True}, + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'columns': {'key': 'columns', 'type': '[str]'}, } + def __init__( + self, + *, + session_id: Optional[str] = None, + data_flow_name: Optional[str] = None, + stream_name: Optional[str] = None, + columns: Optional[List[str]] = None, + **kwargs + ): + super(DataFlowDebugStatisticsRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.stream_name = stream_name + self.columns = columns + + +class DataFlowFolder(msrest.serialization.Model): + """The folder that this data flow is in. If not specified, Data flow will appear at the root level. + + :param name: The name of the folder that this data flow is in. + :type name: str + """ + _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, - 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, *, - cluster_uri: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, - encrypted_credential: Optional[object] = None, - is_esp_enabled: Optional[object] = None, - file_system: Optional[object] = None, + name: Optional[str] = None, **kwargs ): - super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HDInsight' - self.cluster_uri = cluster_uri - self.user_name = user_name - self.password = password - self.linked_service_name = linked_service_name - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.encrypted_credential = encrypted_credential - self.is_esp_enabled = is_esp_enabled - self.file_system = file_system + super(DataFlowFolder, self).__init__(**kwargs) + self.name = name -class HDInsightMapReduceActivity(ExecutionActivity): - """HDInsight MapReduce activity type. 
+class DataFlowListResponse(msrest.serialization.Model):
+    """A list of data flow resources.
    All required parameters must be populated in order to send to Azure.
-    :param additional_properties: Unmatched properties from the message are deserialized to this
-    collection.
-    :type additional_properties: dict[str, object]
-    :param name: Required. Activity name.
-    :type name: str
-    :param type: Required. Type of activity.Constant filled by server.
-    :type type: str
-    :param description: Activity description.
-    :type description: str
-    :param depends_on: Activity depends on condition.
-    :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
-    :param user_properties: Activity user properties.
-    :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
-    :param linked_service_name: Linked service reference.
-    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
-    :param policy: Activity policy.
-    :type policy: ~azure.synapse.artifacts.models.ActivityPolicy
-    :param storage_linked_services: Storage linked service references.
-    :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference]
-    :param arguments: User specified arguments to HDInsightActivity.
-    :type arguments: list[object]
-    :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure".
-    :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption
-    :param class_name: Required. Class name. Type: string (or Expression with resultType string).
-    :type class_name: object
-    :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string).
-    :type jar_file_path: object
-    :param jar_linked_service: Jar linked service reference.
-    :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference
-    :param jar_libs: Jar libs.
-    :type jar_libs: list[object]
-    :param defines: Allows user to specify defines for the MapReduce job request.
-    :type defines: dict[str, object]
+    :param value: Required. List of data flows.
+    :type value: list[~azure.synapse.artifacts.models.DataFlowResource]
+    :param next_link: The link to the next page of results, if any remaining results exist.
+    :type next_link: str
    """
    _validation = {
-        'name': {'required': True},
-        'type': {'required': True},
-        'class_name': {'required': True},
-        'jar_file_path': {'required': True},
+        'value': {'required': True},
    }
    _attribute_map = {
-        'additional_properties': {'key': '', 'type': '{object}'},
-        'name': {'key': 'name', 'type': 'str'},
-        'type': {'key': 'type', 'type': 'str'},
-        'description': {'key': 'description', 'type': 'str'},
-        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
-        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
-        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
-        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
-        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
-        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
-        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
-        'class_name': {'key': 'typeProperties.className', 'type': 'object'},
-        'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'},
-        'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'},
-        'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'},
-        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
+        'value': {'key': 'value', 'type': '[DataFlowResource]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
    }
    def __init__(
        self,
        *,
-        name: str,
-        class_name: object,
-        jar_file_path: object,
-        additional_properties: Optional[Dict[str, object]] = None,
-        description: Optional[str] = None,
-        depends_on: Optional[List["ActivityDependency"]] = None,
-        user_properties: Optional[List["UserProperty"]] = None,
-        linked_service_name: Optional["LinkedServiceReference"] = None,
-        policy: Optional["ActivityPolicy"] = None,
-        storage_linked_services: Optional[List["LinkedServiceReference"]] = None,
-        arguments: Optional[List[object]] = None,
-        get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None,
-        jar_linked_service: Optional["LinkedServiceReference"] = None,
-        jar_libs: Optional[List[object]] = None,
-        defines: Optional[Dict[str, object]] = None,
+        value: List["DataFlowResource"],
+        next_link: Optional[str] = None,
        **kwargs
    ):
-        super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
-        self.type: str = 'HDInsightMapReduce'
-        self.storage_linked_services = storage_linked_services
-        self.arguments = arguments
-        self.get_debug_info = get_debug_info
-        self.class_name = class_name
-        self.jar_file_path = jar_file_path
-        self.jar_linked_service = jar_linked_service
-        self.jar_libs = jar_libs
-        self.defines = defines
+        super(DataFlowListResponse, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
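# --- Editor's illustration (not part of the diff) ---
# A sketch of how a nextLink-style list response is typically walked.
# DataFlowListResponse and DataFlowResource come from the models in this
# changeset; the 'fetch' callable is a hypothetical stand-in for a client call.
from typing import Callable, Iterator
from azure.synapse.artifacts.models import DataFlowListResponse, DataFlowResource

def iter_data_flows(
    first_page: DataFlowListResponse,
    fetch: Callable[[str], DataFlowListResponse],
) -> Iterator[DataFlowResource]:
    page = first_page
    while True:
        for resource in page.value:  # 'value' is required on every page
            yield resource
        if not page.next_link:       # no next_link means this was the last page
            return
        page = fetch(page.next_link)
# --- end illustration ---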
-class HDInsightOnDemandLinkedService(LinkedService):
-    """HDInsight ondemand linked service.
+class DataFlowReference(msrest.serialization.Model):
+    """Data flow reference type.
    All required parameters must be populated in order to send to Azure.
    :param additional_properties: Unmatched properties from the message are deserialized to this
    collection.
    :type additional_properties: dict[str, object]
-    :param type: Required. Type of linked service.Constant filled by server.
- :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. - Type: string (or Expression with resultType string). - :type cluster_size: object - :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. - Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity - run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string - (or Expression with resultType string). - :type time_to_live: object - :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with - resultType string). - :type version: object - :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand - cluster for storing and processing data. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: - string (or Expression with resultType string). - :type host_subscription_id: object - :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string - (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: The key for the service principal id. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: - string (or Expression with resultType string). - :type tenant: object - :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: - string (or Expression with resultType string). - :type cluster_resource_group: object - :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with - timestamp. Type: string (or Expression with resultType string). - :type cluster_name_prefix: object - :param cluster_user_name: The username to access the cluster. Type: string (or Expression with - resultType string). - :type cluster_user_name: object - :param cluster_password: The password to access the cluster. - :type cluster_password: ~azure.synapse.artifacts.models.SecretBase - :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for - Linux). Type: string (or Expression with resultType string). - :type cluster_ssh_user_name: object - :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux). - :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase - :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight - linked service so that the Data Factory service can register them on your behalf. - :type additional_linked_service_names: - list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param hcatalog_linked_service_name: The name of Azure SQL linked service that point to the - HCatalog database. 
The on-demand HDInsight cluster is created by using the Azure SQL database - as the metastore. - :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param cluster_type: The cluster type. Type: string (or Expression with resultType string). - :type cluster_type: object - :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or - Expression with resultType string). - :type spark_version: object - :param core_configuration: Specifies the core configuration parameters (as in core-site.xml) - for the HDInsight cluster to be created. - :type core_configuration: object - :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for - the HDInsight cluster. - :type h_base_configuration: object - :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the - HDInsight cluster. - :type hdfs_configuration: object - :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the - HDInsight cluster. - :type hive_configuration: object - :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred- - site.xml) for the HDInsight cluster. - :type map_reduce_configuration: object - :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for - the HDInsight cluster. - :type oozie_configuration: object - :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for - the HDInsight cluster. - :type storm_configuration: object - :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the - HDInsight cluster. - :type yarn_configuration: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param head_node_size: Specifies the size of the head node for the HDInsight cluster. - :type head_node_size: object - :param data_node_size: Specifies the size of the data node for the HDInsight cluster. - :type data_node_size: object - :param zookeeper_node_size: Specifies the size of the Zoo Keeper node for the HDInsight - cluster. - :type zookeeper_node_size: object - :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up. - Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize- - cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen- - us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. - :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction] - :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be - joined after creation. Type: string (or Expression with resultType string). - :type virtual_network_id: object - :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was - specified, then this property is required. Type: string (or Expression with resultType string). - :type subnet_name: object + :param type: Required. Data flow reference type. Possible values include: "DataFlowReference". + :type type: str or ~azure.synapse.artifacts.models.DataFlowReferenceType + :param reference_name: Required. Reference data flow name. + :type reference_name: str + :param dataset_parameters: Reference data flow parameters from dataset. 
+ :type dataset_parameters: object """ _validation = { 'type': {'required': True}, - 'cluster_size': {'required': True}, - 'time_to_live': {'required': True}, - 'version': {'required': True}, - 'linked_service_name': {'required': True}, - 'host_subscription_id': {'required': True}, - 'tenant': {'required': True}, - 'cluster_resource_group': {'required': True}, + 'reference_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, - 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, - 'version': {'key': 'typeProperties.version', 'type': 'object'}, - 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, - 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, - 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, - 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, - 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, - 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, - 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, - 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, - 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, - 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, - 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, - 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, - 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, - 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, - 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, - 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, - 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, - 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, - 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, - 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, - 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, - 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, - 'virtual_network_id': 
{'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, - 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Union[str, "DataFlowReferenceType"], + reference_name: str, + additional_properties: Optional[Dict[str, object]] = None, + dataset_parameters: Optional[object] = None, + **kwargs + ): + super(DataFlowReference, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.reference_name = reference_name + self.dataset_parameters = dataset_parameters + + +class DataFlowResource(AzureEntityResource): + """Data flow resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, + } + + def __init__( + self, + *, + properties: "DataFlow", + **kwargs + ): + super(DataFlowResource, self).__init__(**kwargs) + self.properties = properties + + +class Transformation(msrest.serialization.Model): + """A data flow transformation. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + description: Optional[str] = None, + **kwargs + ): + super(Transformation, self).__init__(**kwargs) + self.name = name + self.description = description + + +class DataFlowSink(Transformation): + """Transformation for data flow sink. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. 
+ :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + *, + name: str, + description: Optional[str] = None, + dataset: Optional["DatasetReference"] = None, + **kwargs + ): + super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) + self.dataset = dataset + + +class DataFlowSource(Transformation): + """Transformation for data flow source. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + } + + def __init__( + self, + *, + name: str, + description: Optional[str] = None, + dataset: Optional["DatasetReference"] = None, + **kwargs + ): + super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) + self.dataset = dataset + + +class DataFlowSourceSetting(msrest.serialization.Model): + """Definition of data flow source setting for debug. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param source_name: The data flow source name. + :type source_name: str + :param row_limit: Defines the row limit of data flow source in debug. + :type row_limit: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_name': {'key': 'sourceName', 'type': 'str'}, + 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_name: Optional[str] = None, + row_limit: Optional[int] = None, + **kwargs + ): + super(DataFlowSourceSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.source_name = source_name + self.row_limit = row_limit + + +class DataFlowStagingInfo(msrest.serialization.Model): + """Staging info for execute data flow activity. + + :param linked_service: Staging linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param folder_path: Folder path for staging blob. + :type folder_path: str + """ + + _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + } + + def __init__( + self, + *, + linked_service: Optional["LinkedServiceReference"] = None, + folder_path: Optional[str] = None, + **kwargs + ): + super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = linked_service + self.folder_path = folder_path + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param type: Required. Type of activity. Constant filled by server.
+    :type type: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.synapse.artifacts.models.ActivityPolicy
+    :param script_path: Required. Case-sensitive path to folder that contains the U-SQL script.
+    Type: string (or Expression with resultType string).
+    :type script_path: object
+    :param script_linked_service: Required. Script linked service reference.
+    :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param degree_of_parallelism: The maximum number of nodes simultaneously used to run the job.
+    Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1.
+    :type degree_of_parallelism: object
+    :param priority: Determines which jobs out of all that are queued should be selected to run
+    first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or
+    Expression with resultType integer), minimum: 1.
+    :type priority: object
+    :param parameters: Parameters for U-SQL job request.
+    :type parameters: dict[str, object]
+    :param runtime_version: Runtime version of the U-SQL engine to use. Type: string (or Expression
+    with resultType string).
+    :type runtime_version: object
+    :param compilation_mode: Compilation mode of U-SQL. Must be one of these values: Semantic,
+    Full and SingleBox. Type: string (or Expression with resultType string).
+ :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__( + self, + *, + name: str, + script_path: object, + script_linked_service: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + degree_of_parallelism: Optional[object] = None, + priority: Optional[object] = None, + parameters: Optional[Dict[str, object]] = None, + runtime_version: Optional[object] = None, + compilation_mode: Optional[object] = None, + **kwargs + ): + super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'DataLakeAnalyticsU-SQL' # type: str + self.script_path = script_path + self.script_linked_service = script_linked_service + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.parameters = parameters + self.runtime_version = runtime_version + self.compilation_mode = compilation_mode + + +class DataLakeStorageAccountDetails(msrest.serialization.Model): + """Details of the data lake storage account associated with the workspace. + + :param account_url: Account URL. + :type account_url: str + :param filesystem: Filesystem name. + :type filesystem: str + """ + + _attribute_map = { + 'account_url': {'key': 'accountUrl', 'type': 'str'}, + 'filesystem': {'key': 'filesystem', 'type': 'str'}, + } + + def __init__( + self, + *, + account_url: Optional[str] = None, + filesystem: Optional[str] = None, + **kwargs + ): + super(DataLakeStorageAccountDetails, self).__init__(**kwargs) + self.account_url = account_url + self.filesystem = filesystem + + +class DatasetCompression(msrest.serialization.Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'DatasetCompression' # type: str + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'BZip2' # type: str + + +class DatasetDataElement(msrest.serialization.Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + *, + name: Optional[object] = None, + type: Optional[object] = None, + **kwargs + ): + super(DatasetDataElement, self).__init__(**kwargs) + self.name = name + self.type = type + + +class DatasetDebugResource(SubResourceDebugResource): + """Dataset debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + *, + properties: "Dataset", + name: Optional[str] = None, + **kwargs + ): + super(DatasetDebugResource, self).__init__(name=name, **kwargs) + self.properties = properties + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Deflate' # type: str + self.level = level + + +class DatasetFolder(msrest.serialization.Model): + """The folder that this Dataset is in. If not specified, Dataset will appear at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + **kwargs + ): + super(DatasetFolder, self).__init__(**kwargs) + self.name = name + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'GZip' # type: str + self.level = level + + +class DatasetListResponse(msrest.serialization.Model): + """A list of dataset resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of datasets. + :type value: list[~azure.synapse.artifacts.models.DatasetResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DatasetResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["DatasetResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(DatasetListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class DatasetReference(msrest.serialization.Model): + """Dataset reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Dataset reference type. 
Possible values include: "DatasetReference". + :type type: str or ~azure.synapse.artifacts.models.DatasetReferenceType + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + type: Union[str, "DatasetReferenceType"], + reference_name: str, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + self.parameters = parameters + + +class DatasetResource(AzureEntityResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__( + self, + *, + properties: "Dataset", + **kwargs + ): + super(DatasetResource, self).__init__(**kwargs) + self.properties = properties + + +class DatasetSchemaDataElement(msrest.serialization.Model): + """Columns that define the physical type schema of the dataset. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Name of the schema column. Type: string (or Expression with resultType string). + :type name: object + :param type: Type of the schema column. Type: string (or Expression with resultType string). + :type type: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + name: Optional[object] = None, + type: Optional[object] = None, + **kwargs + ): + super(DatasetSchemaDataElement, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.type = type + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. 
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+    collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset compression. Constant filled by server.
+    :type type: str
+    :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest".
+    :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'level': {'key': 'level', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+        **kwargs
+    ):
+        super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.type = 'ZipDeflate'  # type: str
+        self.level = level
+
+
+class Db2LinkedService(LinkedService):
+    """Linked service for DB2 data source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+    collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param server: Required. Server name for connection. Type: string (or Expression with
+    resultType string).
+    :type server: object
+    :param database: Required. Database name for connection. Type: string (or Expression with
+    resultType string).
+    :type database: object
+    :param authentication_type: AuthenticationType to be used for connection. Possible values
+    include: "Basic".
+    :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType
+    :param username: Username for authentication. Type: string (or Expression with resultType
+    string).
+    :type username: object
+    :param password: Password for authentication.
+    :type password: ~azure.synapse.artifacts.models.SecretBase
+    :param package_collection: The collection where packages are created when querying the
+    database. Type: string (or Expression with resultType string).
+    :type package_collection: object
+    :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or
+    Expression with resultType string).
+    :type certificate_common_name: object
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+    encrypted using the integration runtime credential manager. Type: string (or Expression with
+    resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'server': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'server': {'key': 'typeProperties.server', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'},
+        'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        server: object,
+        database: object,
+        additional_properties: Optional[Dict[str, object]] = None,
+        connect_via: Optional["IntegrationRuntimeReference"] = None,
+        description: Optional[str] = None,
+        parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
+        annotations: Optional[List[object]] = None,
+        authentication_type: Optional[Union[str, "Db2AuthenticationType"]] = None,
+        username: Optional[object] = None,
+        password: Optional["SecretBase"] = None,
+        package_collection: Optional[object] = None,
+        certificate_common_name: Optional[object] = None,
+        encrypted_credential: Optional[object] = None,
+        **kwargs
+    ):
+        super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.type = 'Db2'  # type: str
+        self.server = server
+        self.database = database
+        self.authentication_type = authentication_type
+        self.username = username
+        self.password = password
+        self.package_collection = package_collection
+        self.certificate_common_name = certificate_common_name
+        self.encrypted_credential = encrypted_credential
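# --- Editor's illustration (not part of the diff) ---
# A hedged sketch of constructing the Db2 linked service defined above.
# SecureString is assumed to be the SecretBase subclass exposed by these models;
# server/database values are placeholders, and expression objects are also accepted.
from azure.synapse.artifacts.models import Db2LinkedService, SecureString

db2 = Db2LinkedService(
    server="db2.example.com",   # required
    database="SAMPLE",          # required
    authentication_type="Basic",
    username="db2admin",
    password=SecureString(value="<secret>"),
)
# --- end illustration ---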
+
+
+class Db2Source(TabularSource):
+    """A copy activity source for Db2 databases.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+    collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type. Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+    integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+    string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+    store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+    pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: Database query. Type: string (or Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        source_retry_count: Optional[object] = None,
+        source_retry_wait: Optional[object] = None,
+        max_concurrent_connections: Optional[object] = None,
+        query_timeout: Optional[object] = None,
+        query: Optional[object] = None,
+        **kwargs
+    ):
+        super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs)
+        self.type = 'Db2Source'  # type: str
+        self.query = query
+
+
+class Db2TableDataset(Dataset):
+    """The Db2 table dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+    collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset. Constant filled by server.
+    :type type: str
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+    with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+    Expression with resultType array), itemType: DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+    root level.
+    :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+    :param table_name: This property will be retired. Please consider using schema + table
+    properties instead.
+    :type table_name: object
+    :param schema_type_properties_schema: The Db2 schema name. Type: string (or Expression with
+    resultType string).
+    :type schema_type_properties_schema: object
+    :param table: The Db2 table name. Type: string (or Expression with resultType string).
+ :type table: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'linked_service_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'},
+ 'table': {'key': 'typeProperties.table', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ linked_service_name: "LinkedServiceReference",
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ structure: Optional[object] = None,
+ schema: Optional[object] = None,
+ parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
+ annotations: Optional[List[object]] = None,
+ folder: Optional["DatasetFolder"] = None,
+ table_name: Optional[object] = None,
+ schema_type_properties_schema: Optional[object] = None,
+ table: Optional[object] = None,
+ **kwargs
+ ):
+ super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.type = 'Db2Table' # type: str
+ self.table_name = table_name
+ self.schema_type_properties_schema = schema_type_properties_schema
+ self.table = table
+
+
+class DeleteActivity(ExecutionActivity):
+ """Delete activity.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param type: Required. Type of activity.Constant filled by server.
+ :type type: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy
+ :param recursive: If true, files or sub-folders under the current folder path will be deleted
+ recursively. Default is false. Type: boolean (or Expression with resultType boolean).
+ :type recursive: object
+ :param max_concurrent_connections: The maximum number of concurrent connections used to connect
+ to the data source at the same time.
+ :type max_concurrent_connections: int
+ :param enable_logging: Whether to record detailed logs of delete-activity execution. Default
+ value is false. Type: boolean (or Expression with resultType boolean).
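Only linked_service_name is required for the Db2 table dataset just defined. A sketch, assuming LinkedServiceReference takes a type discriminator and reference_name (as references do elsewhere in this package) and using a hypothetical linked service name:

    from azure.synapse.artifacts.models import Db2TableDataset, LinkedServiceReference

    db2_table = Db2TableDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",       # discriminator; assumed signature
            reference_name="Db2LinkedService1",  # hypothetical linked service name
        ),
        schema_type_properties_schema="DB2ADMIN",  # serialized as typeProperties.schema
        table="ORDERS",                            # serialized as typeProperties.table
    )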
+ :type enable_logging: object
+ :param log_storage_settings: Log storage settings the customer needs to provide when
+ enableLogging is true.
+ :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings
+ :param dataset: Required. Delete activity dataset reference.
+ :type dataset: ~azure.synapse.artifacts.models.DatasetReference
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'max_concurrent_connections': {'minimum': 1},
+ 'dataset': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
+ 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
+ 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ dataset: "DatasetReference",
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ depends_on: Optional[List["ActivityDependency"]] = None,
+ user_properties: Optional[List["UserProperty"]] = None,
+ linked_service_name: Optional["LinkedServiceReference"] = None,
+ policy: Optional["ActivityPolicy"] = None,
+ recursive: Optional[object] = None,
+ max_concurrent_connections: Optional[int] = None,
+ enable_logging: Optional[object] = None,
+ log_storage_settings: Optional["LogStorageSettings"] = None,
+ **kwargs
+ ):
+ super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.type = 'Delete' # type: str
+ self.recursive = recursive
+ self.max_concurrent_connections = max_concurrent_connections
+ self.enable_logging = enable_logging
+ self.log_storage_settings = log_storage_settings
+ self.dataset = dataset
+
+
+class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model):
+ """Request body structure for deleting data flow debug session.
+
+ :param session_id: The ID of data flow debug session.
+ :type session_id: str
+ :param data_flow_name: The data flow which contains the debug session.
+ :type data_flow_name: str
+ """
+
+ _attribute_map = {
+ 'session_id': {'key': 'sessionId', 'type': 'str'},
+ 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ session_id: Optional[str] = None,
+ data_flow_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs)
+ self.session_id = session_id
+ self.data_flow_name = data_flow_name
+
+
+class DelimitedTextDataset(Dataset):
+ """Delimited text dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
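A sketch of DeleteActivity, using a hypothetical activity and dataset name and assuming DatasetReference takes a type discriminator and reference_name; note that _validation enforces max_concurrent_connections >= 1:

    from azure.synapse.artifacts.models import DatasetReference, DeleteActivity

    delete_activity = DeleteActivity(
        name="CleanStagingFolder",           # hypothetical activity name
        dataset=DatasetReference(
            type="DatasetReference",         # discriminator; assumed signature
            reference_name="StagingDataset", # hypothetical dataset name
        ),
        recursive=True,                      # delete sub-folders as well
        enable_logging=False,
        max_concurrent_connections=1,        # minimum allowed by _validation
    )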
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset.Constant filled by server.
+ :type type: str
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+ with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+ Expression with resultType array), itemType: DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+ root level.
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+ :param location: The location of the delimited text storage.
+ :type location: ~azure.synapse.artifacts.models.DatasetLocation
+ :param column_delimiter: The column delimiter. Type: string (or Expression with resultType
+ string).
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string).
+ :type row_delimiter: object
+ :param encoding_name: The code page name of the preferred encoding. If missing, the default
+ value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the
+ table in the following link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
+ resultType string).
+ :type encoding_name: object
+ :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate",
+ "snappy", "lz4".
+ :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec
+ :param compression_level: The data compression method used for DelimitedText. Possible values
+ include: "Optimal", "Fastest".
+ :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel
+ :param quote_char: The quote character. Type: string (or Expression with resultType string).
+ :type quote_char: object
+ :param escape_char: The escape character. Type: string (or Expression with resultType string).
+ :type escape_char: object
+ :param first_row_as_header: When used as input, treat the first row of data as headers. When
+ used as output, write the headers into the output as the first row of data. The default value
+ is false. Type: boolean (or Expression with resultType boolean).
+ :type first_row_as_header: object
+ :param null_value: The null value string. Type: string (or Expression with resultType string).
+ :type null_value: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + column_delimiter: Optional[object] = None, + row_delimiter: Optional[object] = None, + encoding_name: Optional[object] = None, + compression_codec: Optional[Union[str, "DelimitedTextCompressionCodec"]] = None, + compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + quote_char: Optional[object] = None, + escape_char: Optional[object] = None, + first_row_as_header: Optional[object] = None, + null_value: Optional[object] = None, + **kwargs + ): + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DelimitedText' # type: str + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + + +class FormatReadSettings(msrest.serialization.Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DelimitedTextReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
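A sketch for the delimited text dataset just defined, with hypothetical container and path values; it assumes AzureBlobStorageLocation is one of the DatasetLocation subclasses exported by the same models package:

    from azure.synapse.artifacts.models import (
        AzureBlobStorageLocation,
        DelimitedTextDataset,
        LinkedServiceReference,
    )

    csv_dataset = DelimitedTextDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="AzureBlobStorage1",  # hypothetical linked service
        ),
        location=AzureBlobStorageLocation(       # assumed DatasetLocation subclass
            container="landing", folder_path="input", file_name="data.csv",
        ),
        column_delimiter=",",
        first_row_as_header=True,
        null_value="",
    )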
+ :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'FormatReadSettings' # type: str + + +class DelimitedTextReadSettings(FormatReadSettings): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from + input files. Type: integer (or Expression with resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + skip_line_count: Optional[object] = None, + **kwargs + ): + super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'DelimitedTextReadSettings' # type: str + self.skip_line_count = skip_line_count + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["DelimitedTextWriteSettings"] = None, + **kwargs + ): + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DelimitedTextSink' # type: str + self.store_settings = store_settings + self.format_settings = format_settings + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: DelimitedText format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, + format_settings: Optional["DelimitedTextReadSettings"] = None, + **kwargs + ): + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DelimitedTextSource' # type: str + self.store_settings = store_settings + self.format_settings = format_settings + + +class DelimitedTextWriteSettings(FormatWriteSettings): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param quote_all_text: Indicates whether string values should always be enclosed with quotes. + Type: boolean (or Expression with resultType boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the files. Type: string (or + Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__( + self, + *, + file_extension: object, + additional_properties: Optional[Dict[str, object]] = None, + quote_all_text: Optional[object] = None, + **kwargs + ): + super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'DelimitedTextWriteSettings' # type: str + self.quote_all_text = quote_all_text + self.file_extension = file_extension + + +class DependencyReference(msrest.serialization.Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. 
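The four DelimitedText copy classes above compose naturally: read settings attach to the source and write settings to the sink. A minimal pairing sketch (file_extension is the only required write setting, per _validation):

    from azure.synapse.artifacts.models import (
        DelimitedTextReadSettings,
        DelimitedTextSink,
        DelimitedTextSource,
        DelimitedTextWriteSettings,
    )

    source = DelimitedTextSource(
        format_settings=DelimitedTextReadSettings(skip_line_count=1),  # skip one banner row
    )
    sink = DelimitedTextSink(
        format_settings=DelimitedTextWriteSettings(
            file_extension=".csv",   # required
            quote_all_text=True,     # always quote string values
        ),
    )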
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DependencyReference, self).__init__(**kwargs)
+ self.type = None # type: Optional[str]
+
+
+class DistcpSettings(msrest.serialization.Model):
+ """Distcp settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param resource_manager_endpoint: Required. Specifies the Yarn ResourceManager endpoint. Type:
+ string (or Expression with resultType string).
+ :type resource_manager_endpoint: object
+ :param temp_script_path: Required. Specifies an existing folder path which will be used to
+ store the temp Distcp command script. The script file is generated by ADF and will be removed
+ after the Copy job finishes. Type: string (or Expression with resultType string).
+ :type temp_script_path: object
+ :param distcp_options: Specifies the Distcp options. Type: string (or Expression with
+ resultType string).
+ :type distcp_options: object
+ """
+
+ _validation = {
+ 'resource_manager_endpoint': {'required': True},
+ 'temp_script_path': {'required': True},
+ }
+
+ _attribute_map = {
+ 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'},
+ 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'},
+ 'distcp_options': {'key': 'distcpOptions', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ resource_manager_endpoint: object,
+ temp_script_path: object,
+ distcp_options: Optional[object] = None,
+ **kwargs
+ ):
+ super(DistcpSettings, self).__init__(**kwargs)
+ self.resource_manager_endpoint = resource_manager_endpoint
+ self.temp_script_path = temp_script_path
+ self.distcp_options = distcp_options
+
+
+class DocumentDbCollectionDataset(Dataset):
+ """Microsoft Azure Document Database Collection dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset.Constant filled by server.
+ :type type: str
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type: array (or Expression
+ with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the dataset. Type: array (or
+ Expression with resultType array), itemType: DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
+ root level.
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder
+ :param collection_name: Required. Document Database collection name. Type: string (or
+ Expression with resultType string).
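A sketch of DistcpSettings with hypothetical YARN and staging values; both required properties are plain strings here, though each also accepts an Expression:

    from azure.synapse.artifacts.models import DistcpSettings

    distcp = DistcpSettings(
        resource_manager_endpoint="http://namenode.example.com:8088",  # hypothetical YARN RM
        temp_script_path="/tmp/distcp-staging",  # hypothetical folder; script is removed after the copy
        distcp_options="-m 10",                  # optional Distcp flags
    )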
+ :type collection_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + collection_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DocumentDbCollection' # type: str + self.collection_name = collection_name + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or + Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or + Expression with resultType string). Allowed values: insert and upsert. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + nesting_separator: Optional[object] = None, + write_behavior: Optional[object] = None, + **kwargs + ): + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DocumentDbCollectionSink' # type: str + self.nesting_separator = nesting_separator + self.write_behavior = write_behavior + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Documents query. Type: string (or Expression with resultType string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or Expression with + resultType string). + :type nesting_separator: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + nesting_separator: Optional[object] = None, + query_timeout: Optional[object] = None, + **kwargs + ): + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DocumentDbCollectionSource' # type: str + self.query = query + self.nesting_separator = nesting_separator + self.query_timeout = query_timeout + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
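A sketch of the Document Database collection source and sink defined above; per the docstring, write_behavior accepts insert or upsert:

    from azure.synapse.artifacts.models import (
        DocumentDbCollectionSink,
        DocumentDbCollectionSource,
    )

    docdb_source = DocumentDbCollectionSource(
        query="SELECT * FROM c",     # hypothetical Cosmos DB SQL query
        nesting_separator=".",       # default separator, shown explicitly
        query_timeout="02:00:00",    # matches the documented timespan pattern
    )
    docdb_sink = DocumentDbCollectionSink(
        write_behavior="upsert",
    )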
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + pwd: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Drill' # type: str + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + + +class DrillSource(TabularSource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'DrillSource' # type: str + self.query = query + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Drill. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Drill. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + **kwargs + ): + super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DrillTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema + + +class DWCopyCommandDefaultValue(msrest.serialization.Model): + """Default value. + + :param column_name: Column name. Type: object (or Expression with resultType string). + :type column_name: object + :param default_value: The default value of the column. Type: object (or Expression with + resultType string). + :type default_value: object + """ + + _attribute_map = { + 'column_name': {'key': 'columnName', 'type': 'object'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__( + self, + *, + column_name: Optional[object] = None, + default_value: Optional[object] = None, + **kwargs + ): + super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + self.column_name = column_name + self.default_value = default_value + + +class DWCopyCommandSettings(msrest.serialization.Model): + """DW Copy Command settings. + + :param default_values: Specifies the default values for each target column in SQL DW. The + default values in the property overwrite the DEFAULT constraint set in the DB, and identity + column cannot have a default value. Type: array of objects (or Expression with resultType array + of objects). + :type default_values: list[~azure.synapse.artifacts.models.DWCopyCommandDefaultValue] + :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: + key value pairs (value should be string type) (or Expression with resultType object). Example: + "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. 
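A sketch wiring the three Drill classes together. The ODBC-style connection string, linked service name, and table names are all hypothetical:

    from azure.synapse.artifacts.models import (
        DrillLinkedService,
        DrillTableDataset,
        LinkedServiceReference,
    )

    drill_ls = DrillLinkedService(
        connection_string="ConnectionType=Direct;Host=drill.example.com;Port=31010",  # hypothetical
    )
    drill_ds = DrillTableDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="Drill1",  # hypothetical
        ),
        schema_type_properties_schema="dfs",  # hypothetical Drill schema
        table="orders",                       # hypothetical Drill table
    )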
+ :type additional_options: dict[str, str] + """ + + _attribute_map = { + 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, + } + + def __init__( + self, + *, + default_values: Optional[List["DWCopyCommandDefaultValue"]] = None, + additional_options: Optional[Dict[str, str]] = None, + **kwargs + ): + super(DWCopyCommandSettings, self).__init__(**kwargs) + self.default_values = default_values + self.additional_options = additional_options + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData + endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. Mark this field as a + SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key + Vault. Type: string (or Expression with resultType string). + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. Specify the tenant information (domain name or tenant ID) under which + your application resides. Retrieve it by hovering the mouse in the top-right corner of the + Azure portal. Type: string (or Expression with resultType string). + :type tenant: object + :param aad_resource_id: Required. Specify the resource you are requesting authorization. Type: + string (or Expression with resultType string). + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
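A sketch of DWCopyCommandSettings; the additional_options dictionary below mirrors the example given in the docstring, and the default-value column is hypothetical:

    from azure.synapse.artifacts.models import (
        DWCopyCommandDefaultValue,
        DWCopyCommandSettings,
    )

    copy_command = DWCopyCommandSettings(
        default_values=[
            DWCopyCommandDefaultValue(
                column_name="LoadDate",       # hypothetical target column
                default_value="GETDATE()",    # overrides the DB DEFAULT constraint
            ),
        ],
        additional_options={"MAXERRORS": "1000", "DATEFORMAT": "'ymd'"},
    )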
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + url: object, + service_principal_id: object, + service_principal_key: "SecretBase", + tenant: object, + aad_resource_id: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'DynamicsAX' # type: str + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
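All five Dynamics AX type properties are required by _validation. A sketch with hypothetical tenant and endpoint values, again assuming the package exports a SecureString implementation of SecretBase:

    from azure.synapse.artifacts.models import DynamicsAXLinkedService, SecureString

    dynamics_ax_ls = DynamicsAXLinkedService(
        url="https://contoso.operations.dynamics.com/data",  # hypothetical OData endpoint
        service_principal_id="<application-client-id>",      # placeholder
        service_principal_key=SecureString(value="<client-secret>"),  # placeholder
        tenant="contoso.onmicrosoft.com",                     # hypothetical tenant
        aad_resource_id="https://contoso.operations.dynamics.com",
    )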
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression + with resultType string). + :type path: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + path: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DynamicsAXResource' # type: str + self.path = path + + +class DynamicsAXSource(TabularSource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'DynamicsAXSource' # type: str + self.query = query + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). 
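A sketch pairing the Dynamics AX dataset with its copy source; the linked service name and OData entity path are hypothetical:

    from azure.synapse.artifacts.models import (
        DynamicsAXResourceDataset,
        DynamicsAXSource,
        LinkedServiceReference,
    )

    ax_dataset = DynamicsAXResourceDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="DynamicsAX1",  # hypothetical
        ),
        path="CustomersV3",  # hypothetical OData entity path
    )
    ax_source = DynamicsAXSource(query_timeout="00:10:00")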
+ :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + entity_name: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DynamicsCrmEntity' # type: str + self.entity_name = entity_name + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' + for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: + string (or Expression with resultType string). Possible values include: "Online", + "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Type: string (or Expression with resultType + string). + :type host_name: object + :param port: The port of on-premises Dynamics CRM server. The property is required for on-prem + and not allowed for online. Default is 443. Type: integer (or Expression with resultType + integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The property is required for + on-line and not allowed for on-prem. Type: string (or Expression with resultType string). 
+ :type service_uri: object
+ :param organization_name: The organization name of the Dynamics CRM instance. The property is
+  required for on-prem and required for online when there is more than one Dynamics CRM
+  instance associated with the user. Type: string (or Expression with resultType string).
+ :type organization_name: object
+ :param authentication_type: Required. The authentication type to connect to Dynamics CRM
+  server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario,
+  'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
+  Expression with resultType string). Possible values include: "Office365", "Ifd",
+  "AADServicePrincipal".
+ :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType
+ :param username: User name to access the Dynamics CRM instance. Type: string (or Expression
+  with resultType string).
+ :type username: object
+ :param password: Password to access the Dynamics CRM instance.
+ :type password: ~azure.synapse.artifacts.models.SecretBase
+ :param service_principal_id: The client ID of the application in Azure Active Directory used
+  for Server-To-Server authentication. Type: string (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_credential_type: The service principal credential type to use in
+  Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
+  for certificate. Type: string (or Expression with resultType string). Possible values include:
+  "ServicePrincipalKey", "ServicePrincipalCert".
+ :type service_principal_credential_type: str or
+  ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType
+ :param service_principal_credential: The credential of the service principal object in Azure
+  Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
+  servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
+  servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only
+  be AzureKeyVaultSecretReference.
+ :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+  encrypted using the integration runtime credential manager. Type: string (or Expression with
+  resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + deployment_type: Union[str, "DynamicsDeploymentType"], + authentication_type: Union[str, "DynamicsAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + host_name: Optional[object] = None, + port: Optional[object] = None, + service_uri: Optional[object] = None, + organization_name: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + service_principal_id: Optional[object] = None, + service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'DynamicsCrm' # type: str + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential + self.encrypted_credential = encrypted_credential + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: Required. The write behavior for the operation. Possible values include: + "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior + :param ignore_null_values: The flag indicating whether to ignore null values from input dataset + (except key fields) during write operation. Default is false. Type: boolean (or Expression with + resultType boolean). + :type ignore_null_values: object + :param alternate_key_name: The logical name of the alternate key which will be used when + upserting records. Type: string (or Expression with resultType string). + :type alternate_key_name: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, + } + + def __init__( + self, + *, + write_behavior: Union[str, "DynamicsSinkWriteBehavior"], + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + ignore_null_values: Optional[object] = None, + alternate_key_name: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DynamicsCrmSink' # type: str + self.write_behavior = write_behavior + self.ignore_null_values = ignore_null_values + self.alternate_key_name = alternate_key_name + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. 
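+
+ Example (an illustrative editor's sketch, not generator output; the FetchXML
+  string below is only a placeholder query)::
+
+     source = DynamicsCrmSource(
+         query="<fetch><entity name='account'/></fetch>"  # placeholder FetchXML
+     )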
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DynamicsCrmSource' # type: str + self.query = query + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. 
+ :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param entity_name: The logical name of the entity. Type: string (or Expression with resultType + string). + :type entity_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + entity_name: Optional[object] = None, + **kwargs + ): + super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'DynamicsEntity' # type: str + self.entity_name = entity_name + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for + Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or + Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". + :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics server. The property is required + for on-prem and not allowed for online. Type: string (or Expression with resultType string). + :type host_name: str + :param port: The port of on-premises Dynamics server. The property is required for on-prem and + not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), + minimum: 0. + :type port: str + :param service_uri: The URL to the Microsoft Dynamics server. 
The property is required for on-
+ line and not allowed for on-prem. Type: string (or Expression with resultType string).
+ :type service_uri: str
+ :param organization_name: The organization name of the Dynamics instance. The property is
+  required for on-prem and required for online when there is more than one Dynamics instance
+  associated with the user. Type: string (or Expression with resultType string).
+ :type organization_name: str
+ :param authentication_type: Required. The authentication type to connect to Dynamics server.
+  'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal'
+  for Server-To-Server authentication in online scenario. Type: string (or Expression with
+  resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal".
+ :type authentication_type: str or ~azure.synapse.artifacts.models.DynamicsAuthenticationType
+ :param username: User name to access the Dynamics instance. Type: string (or Expression with
+  resultType string).
+ :type username: object
+ :param password: Password to access the Dynamics instance.
+ :type password: ~azure.synapse.artifacts.models.SecretBase
+ :param service_principal_id: The client ID of the application in Azure Active Directory used
+  for Server-To-Server authentication. Type: string (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_credential_type: The service principal credential type to use in
+  Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
+  for certificate. Type: string (or Expression with resultType string). Possible values include:
+  "ServicePrincipalKey", "ServicePrincipalCert".
+ :type service_principal_credential_type: str or
+  ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType
+ :param service_principal_credential: The credential of the service principal object in Azure
+  Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
+  servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
+  servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only
+  be AzureKeyVaultSecretReference.
+ :type service_principal_credential: ~azure.synapse.artifacts.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+  encrypted using the integration runtime credential manager. Type: string (or Expression with
+  resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, + 'port': {'key': 'typeProperties.port', 'type': 'str'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + deployment_type: Union[str, "DynamicsDeploymentType"], + authentication_type: Union[str, "DynamicsAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + host_name: Optional[str] = None, + port: Optional[str] = None, + service_uri: Optional[str] = None, + organization_name: Optional[str] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + service_principal_id: Optional[object] = None, + service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Dynamics' # type: str + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential + self.encrypted_credential = encrypted_credential + + +class DynamicsSink(CopySink): + """A copy activity Dynamics sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type.Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+  integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+  string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+  integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+  pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+  store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param write_behavior: Required. The write behavior for the operation. Possible values include:
+  "Upsert".
+ :type write_behavior: str or ~azure.synapse.artifacts.models.DynamicsSinkWriteBehavior
+ :param ignore_null_values: The flag indicating whether to ignore null values from input dataset
+  (except key fields) during write operation. Default is false. Type: boolean (or Expression with
+  resultType boolean).
+ :type ignore_null_values: object
+ :param alternate_key_name: The logical name of the alternate key which will be used when
+  upserting records. Type: string (or Expression with resultType string).
+ :type alternate_key_name: object
+ """
+
+ _validation = {
+     'type': {'required': True},
+     'write_behavior': {'required': True},
+ }
+
+ _attribute_map = {
+     'additional_properties': {'key': '', 'type': '{object}'},
+     'type': {'key': 'type', 'type': 'str'},
+     'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+     'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+     'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+     'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+     'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+     'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
+     'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+     'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
+ }
+
+ def __init__(
+     self,
+     *,
+     write_behavior: Union[str, "DynamicsSinkWriteBehavior"],
+     additional_properties: Optional[Dict[str, object]] = None,
+     write_batch_size: Optional[object] = None,
+     write_batch_timeout: Optional[object] = None,
+     sink_retry_count: Optional[object] = None,
+     sink_retry_wait: Optional[object] = None,
+     max_concurrent_connections: Optional[object] = None,
+     ignore_null_values: Optional[object] = None,
+     alternate_key_name: Optional[object] = None,
+     **kwargs
+ ):
+     super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+     self.type = 'DynamicsSink'  # type: str
+     self.write_behavior = write_behavior
+     self.ignore_null_values = ignore_null_values
+     self.alternate_key_name = alternate_key_name
+
+
+class DynamicsSource(CopySource):
+ """A copy activity Dynamics source.
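+
+ Example (an illustrative editor's sketch, not generator output; shows that the
+  object-typed ``query`` can also carry an expression rather than a literal)::
+
+     source = DynamicsSource(
+         query={
+             "type": "Expression",
+             "value": "@pipeline().parameters.fetchXml",  # hypothetical pipeline parameter
+         }
+     )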
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics + (online & on-premises). Type: string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'DynamicsSource' # type: str + self.query = query + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of the Eloqua server. (i.e. eloqua.example.com). + :type endpoint: object + :param username: Required. The site name and user name of your Eloqua account in the form: + sitename/username. (i.e. Eloqua/Alice). + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. 
+ :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + endpoint: object, + username: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Eloqua' # type: str + self.endpoint = endpoint + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'EloquaObject' # type: str + self.table_name = table_name + + +class EloquaSource(TabularSource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'EloquaSource' # type: str + self.query = query + + +class EntityReference(msrest.serialization.Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "IntegrationRuntimeEntityReferenceType"]] = None, + reference_name: Optional[str] = None, + **kwargs + ): + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: object + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorContract(msrest.serialization.Model): + """Contains details when the response code indicates an error. + + :param error: The error details. + :type error: ~azure.synapse.artifacts.models.ErrorResponse + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + *, + error: Optional["ErrorResponse"] = None, + **kwargs + ): + super(ErrorContract, self).__init__(**kwargs) + self.error = error + + +class ErrorResponse(msrest.serialization.Model): + """The resource management error response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. 
+ :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.synapse.artifacts.models.ErrorResponse] + :ivar additional_info: The error additional info. + :vartype additional_info: list[~azure.synapse.artifacts.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorResponse]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class EvaluateDataFlowExpressionRequest(msrest.serialization.Model): + """Request body structure for data flow expression preview. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow_name: The data flow which contains the debug session. + :type data_flow_name: str + :param stream_name: The output stream name. + :type stream_name: str + :param row_limits: The row limit for preview request. + :type row_limits: int + :param expression: The expression for preview. + :type expression: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__( + self, + *, + session_id: Optional[str] = None, + data_flow_name: Optional[str] = None, + stream_name: Optional[str] = None, + row_limits: Optional[int] = None, + expression: Optional[str] = None, + **kwargs + ): + super(EvaluateDataFlowExpressionRequest, self).__init__(**kwargs) + self.session_id = session_id + self.data_flow_name = data_flow_name + self.stream_name = stream_name + self.row_limits = row_limits + self.expression = expression + + +class ExecuteDataFlowActivity(ExecutionActivity): + """Execute data flow activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param data_flow: Required. Data flow reference. 
+ :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference + :param staging: Staging info for execute data flow activity. + :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo + :param integration_runtime: The integration runtime reference. + :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param compute: Compute properties for data flow activity. + :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'data_flow': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, + 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, + 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + } + + def __init__( + self, + *, + name: str, + data_flow: "DataFlowReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + staging: Optional["DataFlowStagingInfo"] = None, + integration_runtime: Optional["IntegrationRuntimeReference"] = None, + compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, + **kwargs + ): + super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'ExecuteDataFlow' # type: str + self.data_flow = data_flow + self.staging = staging + self.integration_runtime = integration_runtime + self.compute = compute + + +class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): + """Compute properties for data flow activity. + + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. 
+ :type core_count: int + """ + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + } + + def __init__( + self, + *, + compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, + core_count: Optional[int] = None, + **kwargs + ): + super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) + self.compute_type = compute_type + self.core_count = core_count + + +class ExecutePipelineActivity(Activity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.synapse.artifacts.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait for the dependent + pipeline execution to finish. Default is false. + :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__( + self, + *, + name: str, + pipeline: "PipelineReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + parameters: Optional[Dict[str, object]] = None, + wait_on_completion: Optional[bool] = None, + **kwargs + ): + super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'ExecutePipeline' # type: str + self.pipeline = pipeline + self.parameters = parameters + self.wait_on_completion = wait_on_completion + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. 
+ :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.synapse.artifacts.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value should be "x86" or + "x64". Type: string (or Expression with resultType string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: string (or Expression + with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. Type: string (or + Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: ~azure.synapse.artifacts.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the SSIS package. + :type project_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the SSIS package. + :type package_parameters: dict[str, ~azure.synapse.artifacts.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers to execute the SSIS + package. + :type project_connection_managers: dict[str, object] + :param package_connection_managers: The package level connection managers to execute the SSIS + package. + :type package_connection_managers: dict[str, object] + :param property_overrides: The property overrides to execute the SSIS package. + :type property_overrides: dict[str, ~azure.synapse.artifacts.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. 
+ :type log_location: ~azure.synapse.artifacts.models.SSISLogLocation + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{object}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{object}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, + } + + def __init__( + self, + *, + name: str, + package_location: "SSISPackageLocation", + connect_via: "IntegrationRuntimeReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + runtime: Optional[object] = None, + logging_level: Optional[object] = None, + environment_path: Optional[object] = None, + execution_credential: Optional["SSISExecutionCredential"] = None, + project_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, + package_parameters: Optional[Dict[str, "SSISExecutionParameter"]] = None, + project_connection_managers: Optional[Dict[str, object]] = None, + package_connection_managers: Optional[Dict[str, object]] = None, + property_overrides: Optional[Dict[str, "SSISPropertyOverride"]] = None, + log_location: Optional["SSISLogLocation"] = None, + **kwargs + ): + super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'ExecuteSSISPackage' # type: str + self.package_location = package_location + self.runtime = runtime + self.logging_level = logging_level + self.environment_path = environment_path + self.execution_credential = execution_credential + self.connect_via = connect_via + self.project_parameters = project_parameters + 
self.package_parameters = package_parameters + self.project_connection_managers = project_connection_managers + self.package_connection_managers = package_connection_managers + self.property_overrides = property_overrides + self.log_location = log_location + + +class ExposureControlRequest(msrest.serialization.Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__( + self, + *, + feature_name: Optional[str] = None, + feature_type: Optional[str] = None, + **kwargs + ): + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = feature_name + self.feature_type = feature_type + + +class ExposureControlResponse(msrest.serialization.Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None + + +class Expression(msrest.serialization.Model): + """Azure Synapse expression definition. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Expression type. Possible values include: "Expression". + :type type: str or ~azure.synapse.artifacts.models.ExpressionType + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "ExpressionType"], + value: str, + **kwargs + ): + super(Expression, self).__init__(**kwargs) + self.type = type + self.value = value + + +class FileServerLinkedService(LinkedService): + """File system linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + user_id: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'FileServer' # type: str + self.host = host + self.user_id = user_id + self.password = password + self.encrypted_credential = encrypted_credential + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(FileServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'FileServerLocation' # type: str + + +class FileServerReadSettings(StoreReadSettings): + """File server read settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with + resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'FileServerReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class FileServerWriteSettings(StoreWriteSettings): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. 
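+
+      Example (an illustrative sketch, not generated code; values are
+      hypothetical): read settings for a wildcard-based copy from a file server::
+
+          read_settings = FileServerReadSettings(
+              recursive=True,
+              wildcard_folder_path="incoming/2020/*",
+              wildcard_file_name="*.csv",
+          )
+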
+ :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + **kwargs + ): + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'FileServerWriteSettings' # type: str + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + **kwargs + ): + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'FileSystemSink' # type: str + self.copy_behavior = copy_behavior + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + **kwargs + ): + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'FileSystemSource' # type: str + self.recursive = recursive + + +class FilterActivity(Activity): + """Filter and return results from input array based on the conditions. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param items: Required. Input array on which filter should be applied. + :type items: ~azure.synapse.artifacts.models.Expression + :param condition: Required. Condition to be used for filtering the input. 
+     :type condition: ~azure.synapse.artifacts.models.Expression
+     """
+
+     _validation = {
+         'name': {'required': True},
+         'type': {'required': True},
+         'items': {'required': True},
+         'condition': {'required': True},
+     }
+
+     _attribute_map = {
+         'additional_properties': {'key': '', 'type': '{object}'},
+         'name': {'key': 'name', 'type': 'str'},
+         'type': {'key': 'type', 'type': 'str'},
+         'description': {'key': 'description', 'type': 'str'},
+         'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+         'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+         'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+         'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+     }
+
+     def __init__(
+         self,
+         *,
+         name: str,
+         items: "Expression",
+         condition: "Expression",
+         additional_properties: Optional[Dict[str, object]] = None,
+         description: Optional[str] = None,
+         depends_on: Optional[List["ActivityDependency"]] = None,
+         user_properties: Optional[List["UserProperty"]] = None,
+         **kwargs
+     ):
+         super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+         self.type = 'Filter'  # type: str
+         self.items = items
+         self.condition = condition
+
+
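+ # Illustrative sketch, not part of the generated code: a FilterActivity that
+ # keeps the non-empty entries of a hypothetical pipeline-parameter array. All
+ # names and expression values are made up for illustration.
+ #
+ #   filter_activity = FilterActivity(
+ #       name="FilterNonEmpty",
+ #       items=Expression(type="Expression", value="@pipeline().parameters.files"),
+ #       condition=Expression(type="Expression", value="@not(empty(item()))"),
+ #   )
+
+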
+ class ForEachActivity(Activity):
+     """This activity is used for iterating over a collection and executing the given activities.
+
+     All required parameters must be populated in order to send to Azure.
+
+     :param additional_properties: Unmatched properties from the message are deserialized to this
+      collection.
+     :type additional_properties: dict[str, object]
+     :param name: Required. Activity name.
+     :type name: str
+     :param type: Required. Type of activity. Constant filled by server.
+     :type type: str
+     :param description: Activity description.
+     :type description: str
+     :param depends_on: Activity depends on condition.
+     :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
+     :param user_properties: Activity user properties.
+     :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
+     :param is_sequential: Should the loop be executed in sequence or in parallel (max 50).
+     :type is_sequential: bool
+     :param batch_count: Batch count to be used for controlling the number of parallel executions
+      (when isSequential is set to false).
+     :type batch_count: int
+     :param items: Required. Collection to iterate.
+     :type items: ~azure.synapse.artifacts.models.Expression
+     :param activities: Required. List of activities to execute.
+     :type activities: list[~azure.synapse.artifacts.models.Activity]
+     """
+
+     _validation = {
+         'name': {'required': True},
+         'type': {'required': True},
+         'batch_count': {'maximum': 50},
+         'items': {'required': True},
+         'activities': {'required': True},
+     }
+
+     _attribute_map = {
+         'additional_properties': {'key': '', 'type': '{object}'},
+         'name': {'key': 'name', 'type': 'str'},
+         'type': {'key': 'type', 'type': 'str'},
+         'description': {'key': 'description', 'type': 'str'},
+         'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+         'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+         'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
+         'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
+         'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+         'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+     }
+
+     def __init__(
+         self,
+         *,
+         name: str,
+         items: "Expression",
+         activities: List["Activity"],
+         additional_properties: Optional[Dict[str, object]] = None,
+         description: Optional[str] = None,
+         depends_on: Optional[List["ActivityDependency"]] = None,
+         user_properties: Optional[List["UserProperty"]] = None,
+         is_sequential: Optional[bool] = None,
+         batch_count: Optional[int] = None,
+         **kwargs
+     ):
+         super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+         self.type = 'ForEach'  # type: str
+         self.is_sequential = is_sequential
+         self.batch_count = batch_count
+         self.items = items
+         self.activities = activities
+
+
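+ # Illustrative sketch, not part of the generated code: a parallel ForEachActivity
+ # that runs the hypothetical filter_activity from the sketch above once per array
+ # element; batch_count must not exceed the maximum of 50 enforced in _validation.
+ #
+ #   for_each = ForEachActivity(
+ #       name="ForEachFile",
+ #       items=Expression(type="Expression", value="@pipeline().parameters.files"),
+ #       activities=[filter_activity],
+ #       is_sequential=False,
+ #       batch_count=20,
+ #   )
+
+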
+ class FtpReadSettings(StoreReadSettings):
+     """FTP read settings.
+
+     All required parameters must be populated in order to send to Azure.
+
+     :param additional_properties: Unmatched properties from the message are deserialized to this
+      collection.
+     :type additional_properties: dict[str, object]
+     :param type: Required. The read setting type. Constant filled by server.
+     :type type: str
+     :param max_concurrent_connections: The maximum concurrent connection count for the source data
+      store. Type: integer (or Expression with resultType integer).
+     :type max_concurrent_connections: object
+     :param recursive: If true, files under the folder path will be read recursively. Default is
+      true. Type: boolean (or Expression with resultType boolean).
+     :type recursive: object
+     :param wildcard_folder_path: FTP wildcardFolderPath. Type: string (or Expression with
+      resultType string).
+     :type wildcard_folder_path: object
+     :param wildcard_file_name: FTP wildcardFileName. Type: string (or Expression with resultType
+      string).
+     :type wildcard_file_name: object
+     :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores.
+     :type use_binary_transfer: bool
+     """
+
+     _validation = {
+         'type': {'required': True},
+     }
+
+     _attribute_map = {
+         'additional_properties': {'key': '', 'type': '{object}'},
+         'type': {'key': 'type', 'type': 'str'},
+         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+         'recursive': {'key': 'recursive', 'type': 'object'},
+         'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+         'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+         'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
+     }
+
+     def __init__(
+         self,
+         *,
+         additional_properties: Optional[Dict[str, object]] = None,
+         max_concurrent_connections: Optional[object] = None,
+         recursive: Optional[object] = None,
+         wildcard_folder_path: Optional[object] = None,
+         wildcard_file_name: Optional[object] = None,
+         use_binary_transfer: Optional[bool] = None,
+         **kwargs
+     ):
+         super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+         self.type = 'FtpReadSettings'  # type: str
+         self.recursive = recursive
+         self.wildcard_folder_path = wildcard_folder_path
+         self.wildcard_file_name = wildcard_file_name
+         self.use_binary_transfer = use_binary_transfer
+
+
+ class FtpServerLinkedService(LinkedService):
+     """An FTP server linked service.
+
+     All required parameters must be populated in order to send to Azure.
+
+     :param additional_properties: Unmatched properties from the message are deserialized to this
+      collection.
+     :type additional_properties: dict[str, object]
+     :param type: Required. Type of linked service. Constant filled by server.
+     :type type: str
+     :param connect_via: The integration runtime reference.
+     :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+     :param description: Linked service description.
+     :type description: str
+     :param parameters: Parameters for linked service.
+     :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+     :param annotations: List of tags that can be used for describing the linked service.
+     :type annotations: list[object]
+     :param host: Required. Host name of the FTP server. Type: string (or Expression with
+      resultType string).
+     :type host: object
+     :param port: The TCP port number that the FTP server uses to listen for client connections.
+      Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0.
+     :type port: object
+     :param authentication_type: The authentication type to be used to connect to the FTP server.
+      Possible values include: "Basic", "Anonymous".
+     :type authentication_type: str or ~azure.synapse.artifacts.models.FtpAuthenticationType
+     :param user_name: Username to log on to the FTP server. Type: string (or Expression with
+      resultType string).
+     :type user_name: object
+     :param password: Password to log on to the FTP server.
+     :type password: ~azure.synapse.artifacts.models.SecretBase
+     :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+      encrypted using the integration runtime credential manager. Type: string (or Expression with
+      resultType string).
+     :type encrypted_credential: object
+     :param enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is
+      true. Type: boolean (or Expression with resultType boolean).
+ :type enable_ssl: object + :param enable_server_certificate_validation: If true, validate the FTP server SSL certificate + when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with + resultType boolean). + :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + authentication_type: Optional[Union[str, "FtpAuthenticationType"]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + enable_ssl: Optional[object] = None, + enable_server_certificate_validation: Optional[object] = None, + **kwargs + ): + super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'FtpServer' # type: str + self.host = host + self.port = port + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.enable_ssl = enable_ssl + self.enable_server_certificate_validation = enable_server_certificate_validation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
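+
+      Example (an illustrative sketch, not generated code; SecureString as the
+      SecretBase implementation and all values are assumptions)::
+
+          ftp_ls = FtpServerLinkedService(
+              host="ftp.example.com",
+              port=21,
+              authentication_type="Basic",
+              user_name="deploy",
+              password=SecureString(value="<password>"),
+              enable_ssl=True,
+          )
+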
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'FtpServerLocation' # type: str + + +class GetMetadataActivity(ExecutionActivity): + """Activity to get metadata of dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param dataset: Required. GetMetadata activity dataset reference. + :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param field_list: Fields of metadata to get from dataset. 
+ :type field_list: list[object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + } + + def __init__( + self, + *, + name: str, + dataset: "DatasetReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + field_list: Optional[List[object]] = None, + **kwargs + ): + super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'GetMetadata' # type: str + self.dataset = dataset + self.field_list = field_list + + +class GetSsisObjectMetadataRequest(msrest.serialization.Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__( + self, + *, + metadata_path: Optional[str] = None, + **kwargs + ): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = metadata_path + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_customer_id: Required. The Client customer ID of the AdWords account that you + want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the manager account that + you use to grant access to the AdWords API. + :type developer_token: ~azure.synapse.artifacts.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. 
Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to AdWords + for UserAuthentication. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + client_customer_id: object, + developer_token: "SecretBase", + authentication_type: Union[str, "GoogleAdWordsAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + refresh_token: Optional["SecretBase"] = None, + client_id: Optional[object] = None, + client_secret: Optional["SecretBase"] = None, + email: Optional[object] = None, + key_file_path: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'GoogleAdWords' # type: str + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. 
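+
+      Example (an illustrative sketch, not generated code; SecureString and all
+      values are assumptions): a UserAuthentication-based Google AdWords linked
+      service::
+
+          adwords_ls = GoogleAdWordsLinkedService(
+              client_customer_id="123-456-7890",
+              developer_token=SecureString(value="<developer-token>"),
+              authentication_type="UserAuthentication",
+              refresh_token=SecureString(value="<refresh-token>"),
+              client_id="<oauth-client-id>",
+              client_secret=SecureString(value="<oauth-client-secret>"),
+          )
+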
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'GoogleAdWordsObject' # type: str + self.table_name = table_name + + +class GoogleAdWordsSource(TabularSource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'GoogleAdWordsSource' # type: str + self.query = query + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google Drive. Allowing Google + Drive access enables support for federated tables that combine BigQuery data with data from + Google Drive. The default value is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication mechanism used for + authentication. ServiceAuthentication can only be used on self-hosted IR. Possible values + include: "ServiceAuthentication", "UserAuthentication". + :type authentication_type: str or + ~azure.synapse.artifacts.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for authorizing access to BigQuery + for UserAuthentication. 
+ :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id of the google application used to acquire the refresh token. + Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret of the google application used to acquire the refresh + token. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param email: The service account email ID that is used for ServiceAuthentication and can only + be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to authenticate the + service account email address and can only be used on self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + project: object, + authentication_type: Union[str, "GoogleBigQueryAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + additional_projects: Optional[object] = None, + request_google_drive_scope: Optional[object] = None, + refresh_token: 
Optional["SecretBase"] = None, + client_id: Optional[object] = None, + client_secret: Optional["SecretBase"] = None, + email: Optional[object] = None, + key_file_path: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'GoogleBigQuery' # type: str + self.project = project + self.additional_projects = additional_projects + self.request_google_drive_scope = request_google_drive_scope + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using database + table + properties instead. + :type table_name: object + :param table: The table name of the Google BigQuery. Type: string (or Expression with + resultType string). + :type table: object + :param dataset: The database name of the Google BigQuery. Type: string (or Expression with + resultType string). 
+ :type dataset: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + dataset: Optional[object] = None, + **kwargs + ): + super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'GoogleBigQueryObject' # type: str + self.table_name = table_name + self.table = table + self.dataset = dataset + + +class GoogleBigQuerySource(TabularSource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
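+
+      Example (an illustrative sketch, not generated code; the LinkedServiceReference
+      constructor arguments are assumptions and all names are hypothetical):
+      referencing a BigQuery table with the newer table/dataset properties rather
+      than the retired table_name::
+
+          bq_ds = GoogleBigQueryObjectDataset(
+              linked_service_name=LinkedServiceReference(
+                  type="LinkedServiceReference", reference_name="GoogleBigQueryLS"),
+              dataset="analytics",
+              table="events",
+          )
+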
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'GoogleBigQuerySource' # type: str + self.query = query + + +class GoogleCloudStorageLinkedService(LinkedService): + """Linked service for Google Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param access_key_id: The access key identifier of the Google Cloud Storage Identity and Access + Management (IAM) user. Type: string (or Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Google Cloud Storage Identity and Access + Management (IAM) user. + :type secret_access_key: ~azure.synapse.artifacts.models.SecretBase + :param service_url: This value specifies the endpoint to access with the Google Cloud Storage + Connector. This is an optional property; change it only if you want to try a different service + endpoint or want to switch between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_key_id: Optional[object] = None, + secret_access_key: Optional["SecretBase"] = None, + service_url: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'GoogleCloudStorage' # type: str + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + + +class GoogleCloudStorageLocation(DatasetLocation): + """The location of Google Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param version: Specify the version of Google Cloud Storage. Type: string (or Expression with + resultType string). 
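+
+      Example (an illustrative sketch, not generated code; SecureString and all
+      values are assumptions)::
+
+          gcs_ls = GoogleCloudStorageLinkedService(
+              access_key_id="GOOG1EXAMPLE",
+              secret_access_key=SecureString(value="<secret>"),
+          )
+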
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + bucket_name: Optional[object] = None, + version: Optional[object] = None, + **kwargs + ): + super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'GoogleCloudStorageLocation' # type: str + self.bucket_name = bucket_name + self.version = version + + +class GoogleCloudStorageReadSettings(StoreReadSettings): + """Google Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
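The location model above pairs a bucket (and optional object version) with the folder/file fields inherited from DatasetLocation. A minimal sketch with hypothetical names:

```python
from azure.synapse.artifacts.models import GoogleCloudStorageLocation

gcs_location = GoogleCloudStorageLocation(
    bucket_name="my-bucket",        # hypothetical bucket
    folder_path="raw/events",       # inherited from DatasetLocation
    file_name="2020-09-15.json",
)
```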
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'GoogleCloudStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
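The read settings above combine wildcard/prefix filters with a modified-datetime window; the filters are independent and can be used together. A sketch with hypothetical values:

```python
from azure.synapse.artifacts.models import GoogleCloudStorageReadSettings

gcs_read_settings = GoogleCloudStorageReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    modified_datetime_start="2020-01-01T00:00:00Z",
    modified_datetime_end="2020-06-30T23:59:59Z",
)
```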
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + pwd: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Greenplum' # type: str + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + + +class GreenplumSource(TabularSource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
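For the GreenplumLinkedService above, the pwd field carries the password as a Key Vault secret reference so it can stay out of connection_string. A minimal sketch (the connection string is hypothetical, and the Key Vault reference is omitted since AzureKeyVaultSecretReference is defined elsewhere in this file):

```python
from azure.synapse.artifacts.models import GreenplumLinkedService

greenplum_ls = GreenplumLinkedService(
    # ODBC-style connection string; prefer referencing the password via `pwd`.
    connection_string="HOST=gp.example.com;PORT=5432;DB=analytics;UID=loader",
)
```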
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'GreenplumSource' # type: str + self.query = query + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of Greenplum. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of Greenplum. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + **kwargs + ): + super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'GreenplumTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the HBase server. (i.e. 192.168.222.160). + :type host: object + :param port: The TCP port that the HBase instance uses to listen for client connections. The + default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version). + :type http_path: object + :param authentication_type: Required. The authentication mechanism to use to connect to the + HBase server. Possible values include: "Anonymous", "Basic". + :type authentication_type: str or ~azure.synapse.artifacts.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. 
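GreenplumTableDataset is the first model in this span with a required constructor argument (linked_service_name). The LinkedServiceReference constructor shape below is an assumption based on this model set's conventions; note the docstring's advice to use table + schema rather than the retired table_name.

```python
from azure.synapse.artifacts.models import (
    GreenplumTableDataset,
    LinkedServiceReference,  # constructor shape assumed
)

greenplum_dataset = GreenplumTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="GreenplumLS",  # hypothetical linked service name
    ),
    table="sales",  # preferred over the retired table_name property
    schema_type_properties_schema="public",
)
```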
+ :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + authentication_type: Union[str, "HBaseAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + http_path: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(HBaseLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HBase' # type: str + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
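HBaseLinkedService above has two required arguments, host and authentication_type; per the Union type hint, the HBaseAuthenticationType enum also accepts its string values. A sketch using the docstring's own example values:

```python
from azure.synapse.artifacts.models import HBaseLinkedService

hbase_ls = HBaseLinkedService(
    host="192.168.222.160",           # example value from the docstring
    authentication_type="Anonymous",  # or "Basic" plus username/password
    port=9090,                        # the documented default
    enable_ssl=False,
)
```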
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'HBaseObject' # type: str + self.table_name = table_name + + +class HBaseSource(TabularSource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'HBaseSource' # type: str + self.query = query + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the HDFS. Possible values + are: Anonymous and Windows. Type: string (or Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__( + self, + *, + url: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + encrypted_credential: Optional[object] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + **kwargs + ): + super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Hdfs' # type: str + self.url = url + self.authentication_type = authentication_type + self.encrypted_credential = encrypted_credential + self.user_name = user_name + self.password = password + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(HdfsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'HdfsLocation' # type: str + + +class HdfsReadSettings(StoreReadSettings): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + distcp_settings: Optional["DistcpSettings"] = None, + **kwargs + ): + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HdfsReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. 
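HdfsReadSettings above mirrors the Google Cloud Storage read settings earlier in this file, but swaps the prefix filter for Distcp support. A minimal sketch with hypothetical paths:

```python
from azure.synapse.artifacts.models import HdfsReadSettings

hdfs_read_settings = HdfsReadSettings(
    recursive=True,
    wildcard_folder_path="landing/2020/*",  # hypothetical folder filter
    wildcard_file_name="*.parquet",
    enable_partition_discovery=True,
)
```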
+ :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + distcp_settings: Optional["DistcpSettings"] = None, + **kwargs + ): + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HdfsSource' # type: str + self.recursive = recursive + self.distcp_settings = distcp_settings + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". 
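Unlike the tabular sources earlier in this file, HdfsSource derives from CopySource directly, so there is no query or query_timeout. A sketch:

```python
from azure.synapse.artifacts.models import HdfsSource

hdfs_source = HdfsSource(
    recursive=True,
    max_concurrent_connections=4,
    # distcp_settings takes a DistcpSettings model when copying via DistCp.
)
```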
+ :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster + is with ESP (Enterprise Security Package). + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__( + self, + *, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + script_path: Optional[object] = None, + script_linked_service: Optional["LinkedServiceReference"] = None, + defines: Optional[Dict[str, object]] = None, + variables: Optional[List[object]] = None, + query_timeout: Optional[int] = None, + **kwargs + ): + super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightHive' # type: str + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.variables = variables + self.query_timeout = query_timeout + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. 
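A construction sketch for the HDInsightHiveActivity above; besides the server-filled type, only name is required. The script path, defines, and timeout are hypothetical, and per the docstring query_timeout only takes effect on ESP clusters.

```python
from azure.synapse.artifacts.models import HDInsightHiveActivity

hive_activity = HDInsightHiveActivity(
    name="RunDailyHiveScript",               # hypothetical activity name
    script_path="scripts/daily_rollup.hql",  # hypothetical script location
    defines={"run_date": "2020-09-15"},
    get_debug_info="Failure",
    query_timeout=60,  # minutes; effective only on ESP clusters
)
```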
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or Expression with + resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked service that points to + the HCatalog database. + :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security + Package). Type: Boolean. + :type is_esp_enabled: object + :param file_system: Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. + Type: string (or Expression with resultType string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + 'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'}, + } + + def __init__( + self, + *, + cluster_uri: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, + encrypted_credential: Optional[object] = None, + is_esp_enabled: Optional[object] = None, + file_system: Optional[object] = None, + **kwargs + ): + super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HDInsight' # type: str + self.cluster_uri = cluster_uri + self.user_name = user_name + self.password = password + self.linked_service_name = linked_service_name + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.encrypted_credential = encrypted_credential + self.is_esp_enabled = is_esp_enabled + self.file_system = file_system + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. 
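HDInsightLinkedService above targets an existing (bring-your-own) cluster and requires only cluster_uri. SecureString is again assumed to be the available SecretBase implementation, and all values are placeholders.

```python
from azure.synapse.artifacts.models import HDInsightLinkedService, SecureString

hdinsight_ls = HDInsightLinkedService(
    cluster_uri="https://mycluster.azurehdinsight.net",  # hypothetical cluster
    user_name="admin",
    password=SecureString(value="<cluster-password>"),   # SecureString assumed
    is_esp_enabled=False,
)
```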
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + *, + name: str, + class_name: object, + jar_file_path: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + jar_linked_service: Optional["LinkedServiceReference"] = None, + jar_libs: Optional[List[object]] = None, + defines: Optional[Dict[str, object]] = None, + **kwargs + ): + super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightMapReduce' # type: str + self.storage_linked_services = 
storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.class_name = class_name + self.jar_file_path = jar_file_path + self.jar_linked_service = jar_linked_service + self.jar_libs = jar_libs + self.defines = defines + + +class HDInsightOnDemandLinkedService(LinkedService): + """HDInsight ondemand linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param cluster_size: Required. Number of worker/data nodes in the cluster. Suggestion value: 4. + Type: string (or Expression with resultType string). + :type cluster_size: object + :param time_to_live: Required. The allowed idle time for the on-demand HDInsight cluster. + Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity + run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string + (or Expression with resultType string). + :type time_to_live: object + :param version: Required. Version of the HDInsight cluster.  Type: string (or Expression with + resultType string). + :type version: object + :param linked_service_name: Required. Azure Storage linked service to be used by the on-demand + cluster for storing and processing data. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param host_subscription_id: Required. The customer’s subscription to host the cluster. Type: + string (or Expression with resultType string). + :type host_subscription_id: object + :param service_principal_id: The service principal id for the hostSubscriptionId. Type: string + (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key for the service principal id. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Required. The Tenant id/name to which the service principal belongs. Type: + string (or Expression with resultType string). + :type tenant: object + :param cluster_resource_group: Required. The resource group where the cluster belongs. Type: + string (or Expression with resultType string). + :type cluster_resource_group: object + :param cluster_name_prefix: The prefix of cluster name, postfix will be distinct with + timestamp. Type: string (or Expression with resultType string). + :type cluster_name_prefix: object + :param cluster_user_name: The username to access the cluster. Type: string (or Expression with + resultType string). + :type cluster_user_name: object + :param cluster_password: The password to access the cluster. + :type cluster_password: ~azure.synapse.artifacts.models.SecretBase + :param cluster_ssh_user_name: The username to SSH remotely connect to cluster’s node (for + Linux). Type: string (or Expression with resultType string). 
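HDInsightMapReduceActivity above adds two more required arguments on top of name: class_name and jar_file_path. A sketch with hypothetical job details:

```python
from azure.synapse.artifacts.models import HDInsightMapReduceActivity

mr_activity = HDInsightMapReduceActivity(
    name="WordCountJob",                 # hypothetical activity name
    class_name="org.example.WordCount",  # hypothetical main class
    jar_file_path="jars/wordcount.jar",  # hypothetical jar location
    arguments=["wasb:///input", "wasb:///output"],
    get_debug_info="Failure",
)
```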
+ :type cluster_ssh_user_name: object
+ :param cluster_ssh_password: The password to SSH remotely connect cluster’s node (for Linux).
+ :type cluster_ssh_password: ~azure.synapse.artifacts.models.SecretBase
+ :param additional_linked_service_names: Specifies additional storage accounts for the HDInsight
+ linked service so that the Data Factory service can register them on your behalf.
+ :type additional_linked_service_names:
+ list[~azure.synapse.artifacts.models.LinkedServiceReference]
+ :param hcatalog_linked_service_name: The name of the Azure SQL linked service that points to
+ the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL
+ database as the metastore.
+ :type hcatalog_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
+ :param cluster_type: The cluster type. Type: string (or Expression with resultType string).
+ :type cluster_type: object
+ :param spark_version: The version of spark if the cluster type is 'spark'. Type: string (or
+ Expression with resultType string).
+ :type spark_version: object
+ :param core_configuration: Specifies the core configuration parameters (as in core-site.xml)
+ for the HDInsight cluster to be created.
+ :type core_configuration: object
+ :param h_base_configuration: Specifies the HBase configuration parameters (hbase-site.xml) for
+ the HDInsight cluster.
+ :type h_base_configuration: object
+ :param hdfs_configuration: Specifies the HDFS configuration parameters (hdfs-site.xml) for the
+ HDInsight cluster.
+ :type hdfs_configuration: object
+ :param hive_configuration: Specifies the hive configuration parameters (hive-site.xml) for the
+ HDInsight cluster.
+ :type hive_configuration: object
+ :param map_reduce_configuration: Specifies the MapReduce configuration parameters (mapred-
+ site.xml) for the HDInsight cluster.
+ :type map_reduce_configuration: object
+ :param oozie_configuration: Specifies the Oozie configuration parameters (oozie-site.xml) for
+ the HDInsight cluster.
+ :type oozie_configuration: object
+ :param storm_configuration: Specifies the Storm configuration parameters (storm-site.xml) for
+ the HDInsight cluster.
+ :type storm_configuration: object
+ :param yarn_configuration: Specifies the Yarn configuration parameters (yarn-site.xml) for the
+ HDInsight cluster.
+ :type yarn_configuration: object
+ :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+ encrypted using the integration runtime credential manager. Type: string (or Expression with
+ resultType string).
+ :type encrypted_credential: object
+ :param head_node_size: Specifies the size of the head node for the HDInsight cluster.
+ :type head_node_size: object
+ :param data_node_size: Specifies the size of the data node for the HDInsight cluster.
+ :type data_node_size: object
+ :param zookeeper_node_size: Specifies the size of the ZooKeeper node for the HDInsight
+ cluster.
+ :type zookeeper_node_size: object
+ :param script_actions: Custom script actions to run on HDI ondemand cluster once it's up.
+ Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-
+ cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-
+ us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
+ :type script_actions: list[~azure.synapse.artifacts.models.ScriptAction]
+ :param virtual_network_id: The ARM resource ID for the vNet to which the cluster should be
+ joined after creation.
Type: string (or Expression with resultType string). + :type virtual_network_id: object + :param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was + specified, then this property is required. Type: string (or Expression with resultType string). + :type subnet_name: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 
'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + 'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'}, + 'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'}, + } + + def __init__( + self, + *, + cluster_size: object, + time_to_live: object, + version: object, + linked_service_name: "LinkedServiceReference", + host_subscription_id: object, + tenant: object, + cluster_resource_group: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + cluster_name_prefix: Optional[object] = None, + cluster_user_name: Optional[object] = None, + cluster_password: Optional["SecretBase"] = None, + cluster_ssh_user_name: Optional[object] = None, + cluster_ssh_password: Optional["SecretBase"] = None, + additional_linked_service_names: Optional[List["LinkedServiceReference"]] = None, + hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, + cluster_type: Optional[object] = None, + spark_version: Optional[object] = None, + core_configuration: Optional[object] = None, + h_base_configuration: Optional[object] = None, + hdfs_configuration: Optional[object] = None, + hive_configuration: Optional[object] = None, + map_reduce_configuration: Optional[object] = None, + oozie_configuration: Optional[object] = None, + storm_configuration: Optional[object] = None, + yarn_configuration: Optional[object] = None, + encrypted_credential: Optional[object] = None, + head_node_size: Optional[object] = None, + data_node_size: Optional[object] = None, + zookeeper_node_size: Optional[object] = None, + script_actions: Optional[List["ScriptAction"]] = None, + virtual_network_id: Optional[object] = None, + subnet_name: Optional[object] = None, + **kwargs + ): + super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HDInsightOnDemand' # type: str + self.cluster_size = cluster_size + self.time_to_live = time_to_live + self.version = version + self.linked_service_name = linked_service_name + self.host_subscription_id = host_subscription_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.cluster_resource_group = cluster_resource_group + self.cluster_name_prefix = cluster_name_prefix + self.cluster_user_name = cluster_user_name + self.cluster_password = cluster_password + self.cluster_ssh_user_name = cluster_ssh_user_name + self.cluster_ssh_password = cluster_ssh_password + self.additional_linked_service_names = additional_linked_service_names + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.cluster_type = cluster_type + self.spark_version = spark_version + self.core_configuration = core_configuration + self.h_base_configuration = h_base_configuration + self.hdfs_configuration = hdfs_configuration + self.hive_configuration = hive_configuration + self.map_reduce_configuration = map_reduce_configuration 
+ self.oozie_configuration = oozie_configuration + self.storm_configuration = storm_configuration + self.yarn_configuration = yarn_configuration + self.encrypted_credential = encrypted_credential + self.head_node_size = head_node_size + self.data_node_size = data_node_size + self.zookeeper_node_size = zookeeper_node_size + self.script_actions = script_actions + self.virtual_network_id = virtual_network_id + self.subnet_name = subnet_name + + +class HDInsightPigActivity(ExecutionActivity): + """HDInsight Pig activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression + with resultType array). + :type arguments: object + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. 
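To make the shape of these generated models concrete, here is a minimal usage sketch for the HDInsightOnDemandLinkedService defined above. All names, IDs, and secrets are placeholders; the import path follows the ~azure.synapse.artifacts.models references used throughout these docstrings, and SecureString is assumed to be the plain SecretBase implementation from the same module.

```python
from azure.synapse.artifacts.models import (
    HDInsightOnDemandLinkedService,
    LinkedServiceReference,
    SecureString,
)

# Storage linked service backing the transient cluster (placeholder name).
storage_ref = LinkedServiceReference(
    type="LinkedServiceReference", reference_name="MyStorageLS"
)

on_demand_hdi = HDInsightOnDemandLinkedService(
    cluster_size=4,                      # number of worker/data nodes
    time_to_live="00:15:00",             # idle time before the cluster is torn down
    version="3.6",                       # HDInsight version
    linked_service_name=storage_ref,
    host_subscription_id="<subscription-id>",
    tenant="<tenant-id>",
    cluster_resource_group="<resource-group>",
    service_principal_id="<app-id>",
    service_principal_key=SecureString(value="<app-key>"),
)
```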
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + *, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = None, + arguments: Optional[object] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + script_path: Optional[object] = None, + script_linked_service: Optional["LinkedServiceReference"] = None, + defines: Optional[Dict[str, object]] = None, + **kwargs + ): + super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightPig' # type: str + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + + +class HDInsightSparkActivity(ExecutionActivity): + """HDInsight Spark activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. 
+ Type: string (or Expression with resultType string). + :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of the code/package to + be executed. Type: string (or Expression with resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading the entry file and + dependencies, and for receiving logs. + :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. Type: string (or + Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. + :type spark_config: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + } + + def __init__( + self, + *, + name: str, + root_path: object, + entry_file_path: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + spark_job_linked_service: Optional["LinkedServiceReference"] = None, + class_name: Optional[str] = None, + proxy_user: Optional[object] = None, + spark_config: Optional[Dict[str, object]] = None, + **kwargs + ): + super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightSpark' # type: str + self.root_path = root_path + self.entry_file_path = 
entry_file_path + self.arguments = arguments + self.get_debug_info = get_debug_info + self.spark_job_linked_service = spark_job_linked_service + self.class_name = class_name + self.proxy_user = proxy_user + self.spark_config = spark_config + + +class HDInsightStreamingActivity(ExecutionActivity): + """HDInsight streaming activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". + :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption + :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType + string). + :type mapper: object + :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType + string). + :type reducer: object + :param input: Required. Input blob path. Type: string (or Expression with resultType string). + :type input: object + :param output: Required. Output blob path. Type: string (or Expression with resultType string). + :type output: object + :param file_paths: Required. Paths to streaming job files. Can be directories. + :type file_paths: list[object] + :param file_linked_service: Linked service reference where the files are located. + :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param combiner: Combiner executable name. Type: string (or Expression with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. 
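Likewise, a short sketch of the HDInsightSparkActivity defined above, with placeholder paths and linked service names; per the _validation map, root_path and entry_file_path are the two required type properties beyond the activity name.

```python
from azure.synapse.artifacts.models import (
    HDInsightSparkActivity,
    LinkedServiceReference,
)

spark_activity = HDInsightSparkActivity(
    name="RunSparkJob",
    root_path="adfspark",                  # root in 'sparkJobLinkedService' for job files
    entry_file_path="jars/wordcount.jar",  # relative to root_path
    class_name="com.example.WordCount",    # Java/Spark main class
    arguments=["wasbs:///input", "wasbs:///output"],
    spark_job_linked_service=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyStorageLS"
    ),
    get_debug_info="Failure",              # keep logs only for failed runs
)
```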
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__( + self, + *, + name: str, + mapper: object, + reducer: object, + input: object, + output: object, + file_paths: List[object], + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + storage_linked_services: Optional[List["LinkedServiceReference"]] = None, + arguments: Optional[List[object]] = None, + get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, + file_linked_service: Optional["LinkedServiceReference"] = None, + combiner: Optional[object] = None, + command_environment: Optional[List[object]] = None, + defines: Optional[Dict[str, object]] = None, + **kwargs + ): + super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'HDInsightStreaming' # type: str + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.mapper = mapper + self.reducer = reducer + self.input = input + self.output = output + self.file_paths = file_paths + self.file_linked_service = file_linked_service + self.combiner = combiner + self.command_environment = command_environment + self.defines = defines + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. 
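And one for the streaming activity just defined: mapper, reducer, input, output, and file_paths are all required, per the _validation map above. Values are placeholders.

```python
from azure.synapse.artifacts.models import HDInsightStreamingActivity

streaming_activity = HDInsightStreamingActivity(
    name="RunStreamingJob",
    mapper="cat.exe",                         # mapper executable
    reducer="wc.exe",                         # reducer executable
    input="wasbs:///example/input/data.txt",  # input blob path
    output="wasbs:///example/output",         # output blob path
    file_paths=["scripts/cat.exe", "scripts/wc.exe"],
)
```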
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. IP address or host name of the Hive server, separated by ';' for + multiple hosts (only when serviceDiscoveryMode is enabled). + :type host: object + :param port: The TCP port that the Hive server uses to listen for client connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: "HiveServer1", + "HiveServer2", "HiveThriftServer". + :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :type thrift_transport_protocol: str or + ~azure.synapse.artifacts.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to access the Hive server. + Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper service, false otherwise. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are + added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL queries, or converts + them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you provided in the Username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + authentication_type: Union[str, "HiveAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + server_type: Optional[Union[str, "HiveServerType"]] = None, + thrift_transport_protocol: Optional[Union[str, "HiveThriftTransportProtocol"]] = None, + service_discovery_mode: Optional[object] = None, + zoo_keeper_name_space: Optional[object] = None, + use_native_query: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + http_path: Optional[object] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + 
): + super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Hive' # type: str + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.service_discovery_mode = service_discovery_mode + self.zoo_keeper_name_space = zoo_keeper_name_space + self.use_native_query = use_native_query + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Hive. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression + with resultType string). 
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + **kwargs + ): + super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'HiveObject' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema + + +class HiveSource(TabularSource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
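A combined sketch of the two Hive models above: the linked service (host and authentication_type are required) and a dataset bound to it through a LinkedServiceReference. Names are placeholders; note that schema_type_properties_schema serializes to typeProperties.schema per the attribute map.

```python
from azure.synapse.artifacts.models import (
    HiveLinkedService,
    HiveObjectDataset,
    LinkedServiceReference,
)

hive_ls = HiveLinkedService(
    host="hive.example.internal",
    authentication_type="Anonymous",  # or "Username", "UsernameAndPassword", ...
    port=10000,
    enable_ssl=False,
)

hive_ds = HiveObjectDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyHiveLS"
    ),
    schema_type_properties_schema="default",  # serialized as typeProperties.schema
    table="events",
)
```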
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'HiveSource' # type: str + self.query = query + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + string (or Expression with resultType string). + :type url: object + :param authentication_type: The authentication type to be used to connect to the HTTP server. + Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". + :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate with + EmbeddedCertData authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate + authentication. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only + valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either + CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). 
+ :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS server SSL + certificate. Default value is true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, + 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__( + self, + *, + url: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[Union[str, "HttpAuthenticationType"]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + embedded_cert_data: Optional[object] = None, + cert_thumbprint: Optional[object] = None, + encrypted_credential: Optional[object] = None, + enable_server_certificate_validation: Optional[object] = None, + **kwargs + ): + super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'HttpServer' # type: str + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.embedded_cert_data = embedded_cert_data + self.cert_thumbprint = cert_thumbprint + self.encrypted_credential = encrypted_credential + self.enable_server_certificate_validation = enable_server_certificate_validation + + +class HttpReadSettings(StoreReadSettings): + """Http read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer).
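For the HttpLinkedService above, only url is required; a minimal anonymous-access sketch with a placeholder endpoint follows.

```python
from azure.synapse.artifacts.models import HttpLinkedService

http_ls = HttpLinkedService(
    url="https://example.com/data",             # base URL of the HTTP endpoint
    authentication_type="Anonymous",
    enable_server_certificate_validation=True,  # the default is also true
)
```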
+ :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP + server. + :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + request_method: Optional[object] = None, + request_body: Optional[object] = None, + additional_headers: Optional[object] = None, + request_timeout: Optional[object] = None, + **kwargs + ): + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HttpReadSettings' # type: str + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.request_timeout = request_timeout + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with + resultType string). 
+ :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + relative_url: Optional[object] = None, + **kwargs + ): + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'HttpServerLocation' # type: str + self.relative_url = relative_url + + +class HttpSource(CopySource): + """A copy activity source for an HTTP file. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from + HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + http_request_timeout: Optional[object] = None, + **kwargs + ): + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'HttpSource' # type: str + self.http_request_timeout = http_request_timeout + + +class HubspotLinkedService(LinkedService): + """Hubspot Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
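The location and source models above pair naturally in a copy activity: HttpServerLocation addresses a file relative to the linked service's base URL, and HttpSource carries the read-side options. Values are placeholders; the timeout uses the timespan pattern the docstrings describe.

```python
from azure.synapse.artifacts.models import HttpServerLocation, HttpSource

# Dataset location: resolved against the HTTP linked service's base URL.
location = HttpServerLocation(relative_url="reports/2020/summary.csv")

# Copy-activity source reading over HTTP, with an explicit response timeout.
source = HttpSource(http_request_timeout="00:01:40")
```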
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param client_id: Required. The client ID associated with your Hubspot application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot application. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param access_token: The access token obtained when initially authenticating your OAuth + integration. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param refresh_token: The refresh token obtained when initially authenticating your OAuth + integration. + :type refresh_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + client_id: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + client_secret: Optional["SecretBase"] = None, + access_token: Optional["SecretBase"] = None, + refresh_token: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Hubspot' # type: str + self.client_id = client_id + self.client_secret = client_secret + self.access_token = access_token + self.refresh_token = refresh_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
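For the Hubspot linked service above, client_id is the only required type property; secrets go through SecretBase implementations such as SecureString. All credential values below are placeholders.

```python
from azure.synapse.artifacts.models import HubspotLinkedService, SecureString

hubspot_ls = HubspotLinkedService(
    client_id="<hubspot-client-id>",
    client_secret=SecureString(value="<hubspot-client-secret>"),
    access_token=SecureString(value="<oauth-access-token>"),
    use_encrypted_endpoints=True,  # the default is already true
)
```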
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'HubspotObject' # type: str + self.table_name = table_name + + +class HubspotSource(TabularSource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'HubspotSource' # type: str + self.query = query + + +class IfConditionActivity(Activity): + """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param expression: Required. An expression that would evaluate to Boolean. This is used to + determine the block of activities (ifTrueActivities or ifFalseActivities) that will be + executed. + :type expression: ~azure.synapse.artifacts.models.Expression + :param if_true_activities: List of activities to execute if expression is evaluated to true. + This is an optional property and if not provided, the activity will exit without any action. + :type if_true_activities: list[~azure.synapse.artifacts.models.Activity] + :param if_false_activities: List of activities to execute if expression is evaluated to false. + This is an optional property and if not provided, the activity will exit without any action. 
+    :type if_false_activities: list[~azure.synapse.artifacts.models.Activity]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'expression': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+        'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'},
+        'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        expression: "Expression",
+        additional_properties: Optional[Dict[str, object]] = None,
+        description: Optional[str] = None,
+        depends_on: Optional[List["ActivityDependency"]] = None,
+        user_properties: Optional[List["UserProperty"]] = None,
+        if_true_activities: Optional[List["Activity"]] = None,
+        if_false_activities: Optional[List["Activity"]] = None,
+        **kwargs
+    ):
+        super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+        self.type = 'IfCondition'  # type: str
+        self.expression = expression
+        self.if_true_activities = if_true_activities
+        self.if_false_activities = if_false_activities
+
+
+class ImpalaLinkedService(LinkedService):
+    """Impala server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param host: Required. The IP address or host name of the Impala server (e.g.
+     192.168.222.160).
+    :type host: object
+    :param port: The TCP port that the Impala server uses to listen for client connections. The
+     default value is 21050.
+    :type port: object
+    :param authentication_type: Required. The authentication type to use. Possible values include:
+     "Anonymous", "SASLUsername", "UsernameAndPassword".
+    :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType
+    :param username: The user name used to access the Impala server. The default value is anonymous
+     when using SASLUsername.
+    :type username: object
+    :param password: The password corresponding to the user name when using UsernameAndPassword.
+    :type password: ~azure.synapse.artifacts.models.SecretBase
+    :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The
+     default value is false.
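A minimal sketch of composing the IfConditionActivity defined above; it assumes Expression accepts its expression string via a value keyword, as elsewhere in these generated models, and the activity name is hypothetical:

    from azure.synapse.artifacts.models import Expression, IfConditionActivity

    branch = IfConditionActivity(
        name="CheckFlag",
        expression=Expression(value="@bool(pipeline().parameters.runExtra)"),
        if_true_activities=[],     # activities to run when the expression is true
        if_false_activities=None,  # omitted: the activity exits without action, per the docstring
    )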
+ :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + authentication_type: Union[str, "ImpalaAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, 
connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Impala' # type: str + self.host = host + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + + +class ImpalaObjectDataset(Dataset): + """Impala server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Impala. Type: string (or Expression with resultType + string). + :type table: object + :param schema_type_properties_schema: The schema name of the Impala. Type: string (or + Expression with resultType string). 
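For illustration, a hedged sketch of constructing the ImpalaLinkedService above: host and authentication_type are the only required type properties, and the authentication type may be passed as a plain string matching the documented values (all concrete values here are hypothetical):

    from azure.synapse.artifacts.models import ImpalaLinkedService

    impala_ls = ImpalaLinkedService(
        host="192.168.222.160",                     # hypothetical server address
        authentication_type="UsernameAndPassword",  # or an ImpalaAuthenticationType value
        username="analyst",
        enable_ssl=True,
    )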
+ :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + **kwargs + ): + super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ImpalaObject' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema + + +class ImpalaSource(TabularSource): + """A copy activity Impala server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
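A sketch of the ImpalaObjectDataset above. LinkedServiceReference is not part of this diff; it is assumed to take type and reference_name keywords, mirroring the IntegrationRuntimeReference model defined later in this file, and the reference name is hypothetical:

    from azure.synapse.artifacts.models import ImpalaObjectDataset, LinkedServiceReference

    impala_ds = ImpalaObjectDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="ImpalaLS"),
        schema_type_properties_schema="default",  # serialized as typeProperties.schema
        table="events",
    )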
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ImpalaSource' # type: str + self.query = query + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the Informix as ODBC data + store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType + string). + :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~azure.synapse.artifacts.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + credential: Optional["SecretBase"] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Informix' # type: str + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
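A hedged sketch of the InformixLinkedService above; only connection_string is required, and the ODBC-style connection string shown is a hypothetical value:

    from azure.synapse.artifacts.models import InformixLinkedService

    informix_ls = InformixLinkedService(
        connection_string="Driver={IBM INFORMIX ODBC DRIVER};Database=stores;",
        authentication_type="Basic",  # documented values: Anonymous, Basic
        user_name="informix",
    )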
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'InformixSink' # type: str + self.pre_copy_script = pre_copy_script + + +class InformixSource(TabularSource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'InformixSource' # type: str + self.query = query + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The Informix table name. Type: string (or Expression with resultType + string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'InformixTable' # type: str + self.table_name = table_name + + +class IntegrationRuntime(msrest.serialization.Model): + """Azure Synapse nested object which serves as a compute resource for activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'IntegrationRuntime' # type: str + self.description = description + + +class IntegrationRuntimeComputeProperties(msrest.serialization.Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. 
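To make the Informix source and sink models above concrete, a small sketch; the query and script are hypothetical, and the timeout string follows the documented ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])) pattern:

    from azure.synapse.artifacts.models import InformixSink, InformixSource

    src = InformixSource(query="SELECT * FROM customer", query_timeout="02:00:00")
    snk = InformixSink(pre_copy_script="DELETE FROM customer_stage", write_batch_size=1000)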
The supported regions could be + found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement- + activities. + :type location: str + :param node_size: The node size requirement to managed integration runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed + integration runtime. + :type max_parallel_executions_per_node: int + :param data_flow_properties: Data flow properties for managed integration runtime. + :type data_flow_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeDataFlowProperties + :param v_net_properties: VNet properties for managed integration runtime. + :type v_net_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + location: Optional[str] = None, + node_size: Optional[str] = None, + number_of_nodes: Optional[int] = None, + max_parallel_executions_per_node: Optional[int] = None, + data_flow_properties: Optional["IntegrationRuntimeDataFlowProperties"] = None, + v_net_properties: Optional["IntegrationRuntimeVNetProperties"] = None, + **kwargs + ): + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.location = location + self.node_size = node_size + self.number_of_nodes = number_of_nodes + self.max_parallel_executions_per_node = max_parallel_executions_per_node + self.data_flow_properties = data_flow_properties + self.v_net_properties = v_net_properties + + +class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that contains the custom setup + script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.synapse.artifacts.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__( + self, + *, + blob_container_uri: Optional[str] = None, + sas_token: Optional["SecureString"] = None, + **kwargs + ): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = blob_container_uri + self.sas_token = sas_token + + +class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): + """Data flow properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :type compute_type: str or ~azure.synapse.artifacts.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. + :type core_count: int + :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data + flow job. + :type time_to_live: int + """ + + _validation = { + 'time_to_live': {'minimum': 0}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, + core_count: Optional[int] = None, + time_to_live: Optional[int] = None, + **kwargs + ): + super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.compute_type = compute_type + self.core_count = core_count + self.time_to_live = time_to_live + + +class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: ~azure.synapse.artifacts.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + *, + connect_via: Optional["EntityReference"] = None, + staging_linked_service: Optional["EntityReference"] = None, + path: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path + + +class IntegrationRuntimeListResponse(msrest.serialization.Model): + """A list of integration runtime resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtimes. + :type value: list[~azure.synapse.artifacts.models.IntegrationRuntimeResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["IntegrationRuntimeResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntimeListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class IntegrationRuntimeReference(msrest.serialization.Model): + """Integration runtime reference type. 
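A sketch combining the two compute-shape models defined above; the location and node_size values are hypothetical, number_of_nodes must be at least 1 per the validation, and core_count is one of the documented sizes:

    from azure.synapse.artifacts.models import (
        IntegrationRuntimeComputeProperties,
        IntegrationRuntimeDataFlowProperties,
    )

    compute = IntegrationRuntimeComputeProperties(
        location="AutoResolve",      # hypothetical region value
        node_size="Standard_D8_v3",  # hypothetical node size
        number_of_nodes=1,           # validation: minimum 1
        data_flow_properties=IntegrationRuntimeDataFlowProperties(
            compute_type="General", core_count=8, time_to_live=10),
    )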
+ + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of integration runtime. Possible values include: + "IntegrationRuntimeReference". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeReferenceType + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + type: Union[str, "IntegrationRuntimeReferenceType"], + reference_name: str, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + self.parameters = parameters + + +class IntegrationRuntimeResource(AzureEntityResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.synapse.artifacts.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + } + + def __init__( + self, + *, + properties: "IntegrationRuntime", + **kwargs + ): + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = properties + + +class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user account of the catalog + database. + :type catalog_admin_password: ~azure.synapse.artifacts.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values + include: "Basic", "Standard", "Premium", "PremiumRS". 
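The IntegrationRuntimeReference model above is fully specified here, so a sketch needs no assumptions beyond the runtime name, which is hypothetical:

    from azure.synapse.artifacts.models import IntegrationRuntimeReference

    ir_ref = IntegrationRuntimeReference(
        type="IntegrationRuntimeReference",
        reference_name="AutoResolveIntegrationRuntime",  # hypothetical runtime name
    )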
+ :type catalog_pricing_tier: str or + ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + catalog_server_endpoint: Optional[str] = None, + catalog_admin_user_name: Optional[str] = None, + catalog_admin_password: Optional["SecureString"] = None, + catalog_pricing_tier: Optional[Union[str, "IntegrationRuntimeSsisCatalogPricingTier"]] = None, + **kwargs + ): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_server_endpoint = catalog_server_endpoint + self.catalog_admin_user_name = catalog_admin_user_name + self.catalog_admin_password = catalog_admin_password + self.catalog_pricing_tier = catalog_pricing_tier + + +class IntegrationRuntimeSsisProperties(msrest.serialization.Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration runtime. + :type catalog_info: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :type license_type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :type custom_setup_script_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :type data_proxy_properties: + ~azure.synapse.artifacts.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". + :type edition: str or ~azure.synapse.artifacts.models.IntegrationRuntimeEdition + :param express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. 
+ :type express_custom_setup_properties: list[~azure.synapse.artifacts.models.CustomSetupBase] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, + 'edition': {'key': 'edition', 'type': 'str'}, + 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + catalog_info: Optional["IntegrationRuntimeSsisCatalogInfo"] = None, + license_type: Optional[Union[str, "IntegrationRuntimeLicenseType"]] = None, + custom_setup_script_properties: Optional["IntegrationRuntimeCustomSetupScriptProperties"] = None, + data_proxy_properties: Optional["IntegrationRuntimeDataProxyProperties"] = None, + edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None, + express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None, + **kwargs + ): + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_info = catalog_info + self.license_type = license_type + self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties + self.edition = edition + self.express_custom_setup_properties = express_custom_setup_properties + + +class IntegrationRuntimeVNetProperties(msrest.serialization.Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will + use. + :type public_i_ps: list[str] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + 'public_i_ps': {'key': 'publicIPs', 'type': '[str]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + v_net_id: Optional[str] = None, + subnet: Optional[str] = None, + public_i_ps: Optional[List[str]] = None, + **kwargs + ): + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.v_net_id = v_net_id + self.subnet = subnet + self.public_i_ps = public_i_ps + + +class JiraLinkedService(LinkedService): + """Jira Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. 
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the Jira service. (e.g. + jira.example.com). + :type host: object + :param port: The TCP port that the Jira server uses to listen for client connections. The + default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira Service. + :type username: object + :param password: The password corresponding to the user name that you provided in the username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + username: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + password: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Jira' # type: str + self.host = host + self.port = port + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + + +class JiraObjectDataset(Dataset): + """Jira Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
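A hedged sketch of the JiraLinkedService above; SecureString's constructor is not part of this diff and is assumed to take its secret via a value keyword, and any SecretBase subclass would do for password:

    from azure.synapse.artifacts.models import JiraLinkedService, SecureString

    jira_ls = JiraLinkedService(
        host="jira.example.com",
        username="svc-reader",
        port=443,
        password=SecureString(value="<api-token>"),  # assumed constructor shape
    )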
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'JiraObject' # type: str + self.table_name = table_name + + +class JiraSource(TabularSource): + """A copy activity Jira Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'JiraSource' # type: str + self.query = query + + +class JsonDataset(Dataset): + """Json dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the json data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param compression: The data compression method used for the json dataset. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + encoding_name: Optional[object] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Json' # type: str + self.location = location + self.encoding_name = encoding_name + self.compression = compression + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + :param file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". + :type file_pattern: str or ~azure.synapse.artifacts.models.JsonFormatFilePattern + :param nesting_separator: The character used to separate nesting levels. Default value is '.' + (dot). Type: string (or Expression with resultType string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not provided, the + default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. + The full list of supported values can be found in the 'Name' column of the table of encodings + in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). 
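A sketch of the JsonDataset above; location is omitted because its DatasetLocation subclasses are defined elsewhere, and LinkedServiceReference is assumed as in the earlier sketches:

    from azure.synapse.artifacts.models import JsonDataset, LinkedServiceReference

    json_ds = JsonDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="BlobLS"),  # hypothetical name
        encoding_name="UTF-8",
    )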
+ :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column mapping with a customized + column name to extract data from JSON file. For fields under root object, start with "$"; for + fields inside the array chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or + Expression with resultType object). + :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None, + nesting_separator: Optional[object] = None, + encoding_name: Optional[object] = None, + json_node_reference: Optional[object] = None, + json_path_definition: Optional[object] = None, + **kwargs + ): + super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'JsonFormat' # type: str + self.file_pattern = file_pattern + self.nesting_separator = nesting_separator + self.encoding_name = encoding_name + self.json_node_reference = json_node_reference + self.json_path_definition = json_path_definition + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.synapse.artifacts.models.JsonWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["JsonWriteSettings"] = None, + **kwargs + ): + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'JsonSink' # type: str + self.store_settings = store_settings + self.format_settings = format_settings + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Json store settings. 
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, + **kwargs + ): + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'JsonSource' # type: str + self.store_settings = store_settings + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible + values include: "setOfObjects", "arrayOfObjects". + :type file_pattern: str or ~azure.synapse.artifacts.models.JsonWriteFilePattern + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None, + **kwargs + ): + super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'JsonWriteSettings' # type: str + self.file_pattern = file_pattern + + +class LibraryRequirements(msrest.serialization.Model): + """Library requirements for a Big Data pool powered by Apache Spark. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar time: The last update time of the library requirements file. + :vartype time: ~datetime.datetime + :param content: The library requirements. + :type content: str + :param filename: The filename of the library requirements file. 
+ :type filename: str + """ + + _validation = { + 'time': {'readonly': True}, + } + + _attribute_map = { + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'content': {'key': 'content', 'type': 'str'}, + 'filename': {'key': 'filename', 'type': 'str'}, + } + + def __init__( + self, + *, + content: Optional[str] = None, + filename: Optional[str] = None, + **kwargs + ): + super(LibraryRequirements, self).__init__(**kwargs) + self.time = None + self.content = content + self.filename = filename + + +class LinkedIntegrationRuntimeType(msrest.serialization.Model): + """The base definition of a linked integration runtime. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + """ + + _validation = { + 'authorization_type': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + } + + _subtype_map = { + 'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'} + } + + def __init__( + self, + **kwargs + ): + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + self.authorization_type = None # type: Optional[str] + + +class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): + """The key authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param key: Required. The key used for authorization. + :type key: ~azure.synapse.artifacts.models.SecureString + """ + + _validation = { + 'authorization_type': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'SecureString'}, + } + + def __init__( + self, + *, + key: "SecureString", + **kwargs + ): + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.authorization_type = 'Key' # type: str + self.key = key + + +class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): + """The role based access control (RBAC) authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration runtime to be shared. 
+ :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_id: str, + **kwargs + ): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.authorization_type = 'RBAC' # type: str + self.resource_id = resource_id + + +class LinkedServiceDebugResource(SubResourceDebugResource): + """Linked service debug resource. + + All required parameters must be populated in order to send to Azure. + + :param name: The resource name. + :type name: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + """ + + _validation = { + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__( + self, + *, + properties: "LinkedService", + name: Optional[str] = None, + **kwargs + ): + super(LinkedServiceDebugResource, self).__init__(name=name, **kwargs) + self.properties = properties + + +class LinkedServiceListResponse(msrest.serialization.Model): + """A list of linked service resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of linked services. + :type value: list[~azure.synapse.artifacts.models.LinkedServiceResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["LinkedServiceResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(LinkedServiceListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class LinkedServiceReference(msrest.serialization.Model): + """Linked service reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Linked service reference type. Possible values include: + "LinkedServiceReference". + :type type: str or ~azure.synapse.artifacts.models.Type + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + type: Union[str, "Type"], + reference_name: str, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(LinkedServiceReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + self.parameters = parameters + + +class LinkedServiceResource(AzureEntityResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__( + self, + *, + properties: "LinkedService", + **kwargs + ): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = properties + + +class LogStorageSettings(msrest.serialization.Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + path: Optional[object] = None, + **kwargs + ): + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy activity source. 
+    :type source: ~azure.synapse.artifacts.models.CopySource
+    :param dataset: Required. Lookup activity dataset reference.
+    :type dataset: ~azure.synapse.artifacts.models.DatasetReference
+    :param first_row_only: Whether to return first row or all rows. Default value is true. Type:
+     boolean (or Expression with resultType boolean).
+    :type first_row_only: object
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'source': {'required': True},
+        'dataset': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'source': {'key': 'typeProperties.source', 'type': 'CopySource'},
+        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+        'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        source: "CopySource",
+        dataset: "DatasetReference",
+        additional_properties: Optional[Dict[str, object]] = None,
+        description: Optional[str] = None,
+        depends_on: Optional[List["ActivityDependency"]] = None,
+        user_properties: Optional[List["UserProperty"]] = None,
+        linked_service_name: Optional["LinkedServiceReference"] = None,
+        policy: Optional["ActivityPolicy"] = None,
+        first_row_only: Optional[object] = None,
+        **kwargs
+    ):
+        super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+        self.type = 'Lookup'  # type: str
+        self.source = source
+        self.dataset = dataset
+        self.first_row_only = first_row_only
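+
+
+# Illustrative usage sketch (not part of the generated model surface): a
+# LookupActivity wired to the JsonSource defined above and a dataset reference.
+# The activity and dataset names are hypothetical, and DatasetReference is
+# assumed to take the same (type, reference_name) shape as LinkedServiceReference.
+def _example_lookup_activity() -> LookupActivity:
+    return LookupActivity(
+        name="LookupLatestRow",  # hypothetical activity name
+        source=JsonSource(),  # every setting on JsonSource is optional
+        dataset=DatasetReference(
+            type="DatasetReference",
+            reference_name="MyJsonDataset",  # hypothetical dataset name
+        ),
+        first_row_only=True,  # return a single row, per the default documented above
+    )
+
+
+class MagentoLinkedService(LinkedService):
+    """Magento server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3).
+    :type host: object
+    :param access_token: The access token from Magento.
+    :type access_token: ~azure.synapse.artifacts.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using
+     HTTPS. The default value is true.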
+ :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Magento' # type: str + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MagentoObject' # type: str + self.table_name = table_name + + +class MagentoSource(TabularSource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MagentoSource' # type: str + self.query = query + + +class ManagedIdentity(msrest.serialization.Model): + """The workspace managed identity. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the workspace managed identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the workspace managed identity. + :vartype tenant_id: str + :param type: The type of managed identity for the workspace. Possible values include: "None", + "SystemAssigned". + :type type: str or ~azure.synapse.artifacts.models.ResourceIdentityType + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "ResourceIdentityType"]] = None, + **kwargs + ): + super(ManagedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. + Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", + "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". + :vartype state: str or ~azure.synapse.artifacts.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration runtime. 
+    :type compute_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeComputeProperties
+    :param ssis_properties: SSIS properties for managed integration runtime.
+    :type ssis_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeSsisProperties
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'state': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'state': {'key': 'state', 'type': 'str'},
+        'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'},
+        'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        description: Optional[str] = None,
+        compute_properties: Optional["IntegrationRuntimeComputeProperties"] = None,
+        ssis_properties: Optional["IntegrationRuntimeSsisProperties"] = None,
+        **kwargs
+    ):
+        super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs)
+        self.type = 'Managed'  # type: str
+        self.state = None
+        self.compute_properties = compute_properties
+        self.ssis_properties = ssis_properties
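+
+
+# Illustrative usage sketch (not part of the generated model surface): a managed
+# integration runtime with minimal compute properties. The "AutoResolve" location
+# value is hypothetical, and IntegrationRuntimeComputeProperties (defined earlier
+# in this module) is assumed to accept an optional `location` keyword.
+def _example_managed_integration_runtime() -> ManagedIntegrationRuntime:
+    return ManagedIntegrationRuntime(
+        description="Managed IR for data flow execution",
+        compute_properties=IntegrationRuntimeComputeProperties(location="AutoResolve"),
+    )
+
+
+class MappingDataFlow(DataFlow):
+    """Mapping data flow.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Type of data flow. Constant filled by server.
+    :type type: str
+    :param description: The description of the data flow.
+    :type description: str
+    :param annotations: List of tags that can be used for describing the data flow.
+    :type annotations: list[object]
+    :param folder: The folder that this data flow is in. If not specified, Data flow will appear at
+     the root level.
+    :type folder: ~azure.synapse.artifacts.models.DataFlowFolder
+    :param sources: List of sources in data flow.
+    :type sources: list[~azure.synapse.artifacts.models.DataFlowSource]
+    :param sinks: List of sinks in data flow.
+    :type sinks: list[~azure.synapse.artifacts.models.DataFlowSink]
+    :param transformations: List of transformations in data flow.
+    :type transformations: list[~azure.synapse.artifacts.models.Transformation]
+    :param script: DataFlow script.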
+    :type script: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DataFlowFolder'},
+        'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'},
+        'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
+        'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
+        'script': {'key': 'typeProperties.script', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        annotations: Optional[List[object]] = None,
+        folder: Optional["DataFlowFolder"] = None,
+        sources: Optional[List["DataFlowSource"]] = None,
+        sinks: Optional[List["DataFlowSink"]] = None,
+        transformations: Optional[List["Transformation"]] = None,
+        script: Optional[str] = None,
+        **kwargs
+    ):
+        super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs)
+        self.type = 'MappingDataFlow'  # type: str
+        self.sources = sources
+        self.sinks = sinks
+        self.transformations = transformations
+        self.script = script
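+
+
+# Illustrative usage sketch (not part of the generated model surface): a minimal
+# mapping data flow shell. The source/sink names and the script body are
+# hypothetical; DataFlowSource and DataFlowSink (defined earlier in this module)
+# are assumed to require only a transformation `name`.
+def _example_mapping_data_flow() -> MappingDataFlow:
+    return MappingDataFlow(
+        description="Reshape JSON rows",
+        sources=[DataFlowSource(name="source1")],
+        sinks=[DataFlowSink(name="sink1")],
+        script="source(output(id as string)) ~> source1",  # hypothetical script
+    )
+
+
+class MariaDBLinkedService(LinkedService):
+    """MariaDB server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of linked service. Constant filled by server.
+    :type type: str
+    :param connect_via: The integration runtime reference.
+    :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the linked service.
+    :type annotations: list[object]
+    :param connection_string: An ODBC connection string. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param pwd: The Azure key vault secret reference of password in connection string.
+    :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for authentication. Credentials are
+     encrypted using the integration runtime credential manager. Type: string (or Expression with
+     resultType string).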
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        connect_via: Optional["IntegrationRuntimeReference"] = None,
+        description: Optional[str] = None,
+        parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
+        annotations: Optional[List[object]] = None,
+        connection_string: Optional[object] = None,
+        pwd: Optional["AzureKeyVaultSecretReference"] = None,
+        encrypted_credential: Optional[object] = None,
+        **kwargs
+    ):
+        super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.type = 'MariaDB'  # type: str
+        self.connection_string = connection_string
+        self.pwd = pwd
+        self.encrypted_credential = encrypted_credential
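+
+
+# Illustrative usage sketch (not part of the generated model surface): a MariaDB
+# linked service whose password is resolved from Azure Key Vault via the
+# AzureKeyVaultSecretReference model defined earlier in this module. The
+# connection string, vault linked service name, and secret name are hypothetical.
+def _example_mariadb_linked_service() -> MariaDBLinkedService:
+    return MariaDBLinkedService(
+        connection_string="Server=mydb.example.com;Port=3306;Database=sales;UID=loader;",
+        pwd=AzureKeyVaultSecretReference(
+            store=LinkedServiceReference(
+                type="LinkedServiceReference",
+                reference_name="MyKeyVaultLinkedService",  # hypothetical
+            ),
+            secret_name="mariadb-password",  # hypothetical
+        ),
+    )
+
+
+class MariaDBSource(TabularSource):
+    """A copy activity MariaDB server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type. Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param query: A query to retrieve data from source. Type: string (or Expression with resultType
+     string).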
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MariaDBSource' # type: str + self.query = query + + +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MariaDBTable' # type: str + self.table_name = table_name + + +class MarketoLinkedService(LinkedService): + """Marketo server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). + :type endpoint: object + :param client_id: Required. The client Id of your Marketo service. + :type client_id: object + :param client_secret: The client secret of your Marketo service. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. 
Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + endpoint: object, + client_id: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + client_secret: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Marketo' # type: str + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. 
+ :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MarketoObject' # type: str + self.table_name = table_name + + +class MarketoSource(TabularSource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MarketoSource' # type: str + self.query = query + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the Microsoft Access as + ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with + resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~azure.synapse.artifacts.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + credential: Optional["SecretBase"] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MicrosoftAccess' # type: str + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). 
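A minimal sketch of constructing the Microsoft Access linked service defined above. SecureString is assumed to be the generated SecretBase subtype wrapping a plain value, and the DSN is illustrative:

    from azure.synapse.artifacts.models import MicrosoftAccessLinkedService, SecureString

    access_ls = MicrosoftAccessLinkedService(
        connection_string="DSN=MyAccessDsn;",     # required; string, SecureString or AzureKeyVaultSecretReference
        authentication_type="Basic",              # Anonymous or Basic
        user_name="access_user",
        password=SecureString(value="<secret>"),  # any SecretBase subtype works here
    )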
+ :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MicrosoftAccessSink' # type: str + self.pre_copy_script = pre_copy_script + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MicrosoftAccessSource' # type: str + self.query = query + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = table_name + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param collection_name: Required. The table name of the MongoDB database. Type: string (or + Expression with resultType string). 
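Putting the three Microsoft Access copy models above together, a sketch of a source, a sink, and a table dataset. The access_ref variable is a hypothetical LinkedServiceReference, assumed to be defined elsewhere:

    from azure.synapse.artifacts.models import (
        MicrosoftAccessSink,
        MicrosoftAccessSource,
        MicrosoftAccessTableDataset,
    )

    # Read all rows from the Orders table.
    source = MicrosoftAccessSource(query="SELECT * FROM Orders")

    # Clear the staging table before copying, writing in batches of 1000.
    sink = MicrosoftAccessSink(pre_copy_script="DELETE FROM OrdersStaging", write_batch_size=1000)

    # Dataset bound to a hypothetical Access linked service reference.
    orders = MicrosoftAccessTableDataset(linked_service_name=access_ref, table_name="Orders")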
+ :type collection_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + collection_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MongoDbCollection' # type: str + self.collection_name = collection_name + + +class MongoDbCursorMethodsProperties(msrest.serialization.Model): + """Cursor methods for a MongoDB query. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param project: Specifies the fields to return in the documents that match the query filter. To + return all fields in the matching documents, omit this parameter. Type: string (or Expression + with resultType string). + :type project: object + :param sort: Specifies the order in which the query returns matching documents. Type: string + (or Expression with resultType string). + :type sort: object + :param skip: Specifies how many documents are skipped and where MongoDB begins returning + results. This approach may be useful in implementing paginated results. Type: integer (or + Expression with resultType integer). + :type skip: object + :param limit: Specifies the maximum number of documents the server returns. limit() is + analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with + resultType integer). 
+ :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + project: Optional[object] = None, + sort: Optional[object] = None, + skip: Optional[object] = None, + limit: Optional[object] = None, + **kwargs + ): + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param server: Required. The IP address or server name of the MongoDB server. Type: string (or + Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect to the MongoDB + database. Possible values include: "Basic", "Anonymous". + :type authentication_type: str or ~azure.synapse.artifacts.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you want to access. Type: + string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression with resultType + string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param auth_source: Database to verify the username and password. Type: string (or Expression + with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen for client connections. + The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. Type: boolean (or Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
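The cursor-method properties above map directly onto MongoDB's projection/sort/skip/limit. Per the docstrings, project and sort are JSON-style strings (or Expressions) while skip and limit are integers. A sketch with illustrative values:

    from azure.synapse.artifacts.models import MongoDbCursorMethodsProperties

    cursor = MongoDbCursorMethodsProperties(
        project='{"name": 1, "city": 1, "_id": 0}',  # fields to return
        sort='{"name": 1}',                          # ascending by name
        skip=100,                                    # paginate: skip the first 100 documents
        limit=50,                                    # return at most 50 documents
    )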
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + server: object, + database_name: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[Union[str, "MongoDbAuthenticationType"]] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + auth_source: Optional[object] = None, + port: Optional[object] = None, + enable_ssl: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MongoDb' # type: str + self.server = server + self.authentication_type = authentication_type + self.database_name = database_name + self.username = username + self.password = password + self.auth_source = auth_source + self.port = port + self.enable_ssl = enable_ssl + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. 
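A sketch of the MongoDB (v1) linked service defined above, using Basic authentication. The host name and credentials are placeholders, and SecureString is an assumed SecretBase subtype:

    from azure.synapse.artifacts.models import MongoDbLinkedService, SecureString

    mongo_ls = MongoDbLinkedService(
        server="mongo.contoso.internal",          # required
        database_name="sales",                    # required
        authentication_type="Basic",              # "Basic" or "Anonymous"
        username="reader",
        password=SecureString(value="<secret>"),
        port=27017,                               # default MongoDB port
        enable_ssl=True,
    )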
Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbSource' # type: str + self.query = query + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB database. Type: string (or + Expression with resultType string). 
+ :type collection: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + collection: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + **kwargs + ): + super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MongoDbV2Collection' # type: str + self.collection = collection + + +class MongoDbV2LinkedService(LinkedService): + """Linked service for MongoDB data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service. Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB connection string. Type: string, SecureString + or AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB database that you want to access. Type: + string (or Expression with resultType string). 
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + database: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MongoDbV2' # type: str + self.connection_string = connection_string + self.database = database + + +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type. Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for a MongoDB query. + :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from the MongoDB instance. In most cases, modifying the batch size will not affect the user or + the application. This property's main purpose is to avoid hitting the response size limit. + Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
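For the v2 MongoDB models, a sketch pairing the linked service with its collection dataset. The connection string and the mongo_v2_ref LinkedServiceReference are illustrative:

    from azure.synapse.artifacts.models import (
        MongoDbV2CollectionDataset,
        MongoDbV2LinkedService,
    )

    mongo_v2_ls = MongoDbV2LinkedService(
        connection_string="mongodb://mongo.contoso.com:27017",  # required
        database="sales",                                       # required
    )

    # Dataset bound to a hypothetical reference to the linked service above.
    orders = MongoDbV2CollectionDataset(linked_service_name=mongo_v2_ref, collection="orders")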
+ :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, + **kwargs + ): + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbV2Source' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout + + +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
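A sketch of the v2 source above, combining a selection filter with the cursor-methods model; the filter string follows MongoDB query-operator syntax, and the values shown are illustrative:

    from azure.synapse.artifacts.models import (
        MongoDbCursorMethodsProperties,
        MongoDbV2Source,
    )

    source = MongoDbV2Source(
        filter='{"status": "shipped"}',  # omit (or pass "{}") to return all documents
        cursor_methods=MongoDbCursorMethodsProperties(sort='{"orderDate": -1}', limit=1000),
        batch_size=500,                  # documents per response batch
        query_timeout="00:05:00",
    )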
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MySql' # type: str + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + + +class MySqlSource(TabularSource): + """A copy activity source for MySQL databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'MySqlSource' # type: str + self.query = query + + +class MySqlTableDataset(Dataset): + """The MySQL table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The MySQL table name. Type: string (or Expression with resultType string). 
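A sketch of the MySQL linked service and source above. The connection string is illustrative, and, as the docstring notes, the password could instead be supplied as an AzureKeyVaultSecretReference:

    from azure.synapse.artifacts.models import MySqlLinkedService, MySqlSource

    # Linked service with a plain connection string (required).
    mysql_ls = MySqlLinkedService(connection_string="Server=db1;Port=3306;Database=sales;UID=reader;")

    # Tabular source with a query and a two-minute timeout.
    mysql_source = MySqlSource(query="SELECT id, total FROM orders", query_timeout="00:02:00")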
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MySqlTable' # type: str + self.table_name = table_name + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: An ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection string. + :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, + pwd: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Netezza' # type: str + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + + +class NetezzaPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Netezza source partitioning. + + :param partition_column_name: The name of the integer-type column that will be used for + range partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of the column specified in partitionColumnName + that will be used for range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of the column specified in partitionColumnName + that will be used for range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(NetezzaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + +class NetezzaSource(TabularSource): + """A copy activity Netezza source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type. Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. 
Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for Netezza read in + parallel. Possible values include: "None", "DataSlice", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.NetezzaPartitionOption + :param partition_settings: The settings that will be leveraged for Netezza source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.NetezzaPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + partition_option: Optional[Union[str, "NetezzaPartitionOption"]] = None, + partition_settings: Optional["NetezzaPartitionSettings"] = None, + **kwargs + ): + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'NetezzaSource' # type: str + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + + +class NetezzaTableDataset(Dataset): + """Netezza dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. 
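A sketch of a parallel Netezza read using the partition-settings model above. "DynamicRange" is one of the documented partition options, and the bounds are passed as loosely typed values; column and table names are illustrative:

    from azure.synapse.artifacts.models import NetezzaPartitionSettings, NetezzaSource

    settings = NetezzaPartitionSettings(
        partition_column_name="order_id",  # integer column used for range partitioning
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    )
    source = NetezzaSource(
        query="SELECT * FROM orders",
        partition_option="DynamicRange",   # "None", "DataSlice" or "DynamicRange"
        partition_settings=settings,
    )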
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Netezza. Type: string (or Expression with resultType + string). + :type table: object + :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or + Expression with resultType string). + :type schema_type_properties_schema: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, *, - cluster_size: object, - time_to_live: object, - version: object, linked_service_name: "LinkedServiceReference", - host_subscription_id: object, - tenant: object, - cluster_resource_group: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - cluster_name_prefix: Optional[object] = None, - cluster_user_name: Optional[object] = None, - cluster_password: Optional["SecretBase"] = None, - cluster_ssh_user_name: Optional[object] = None, - cluster_ssh_password: Optional["SecretBase"] = None, - additional_linked_service_names: Optional[List["LinkedServiceReference"]] = None, - hcatalog_linked_service_name: Optional["LinkedServiceReference"] = None, - cluster_type: Optional[object] = None, - spark_version: Optional[object] = None, - core_configuration: Optional[object] = None, - h_base_configuration: Optional[object] = None, - hdfs_configuration: Optional[object] = None, - hive_configuration: Optional[object] = None, - map_reduce_configuration: Optional[object] = None, - oozie_configuration: Optional[object] = None, - storm_configuration: Optional[object] = None, - yarn_configuration: Optional[object] = None, - 
encrypted_credential: Optional[object] = None, - head_node_size: Optional[object] = None, - data_node_size: Optional[object] = None, - zookeeper_node_size: Optional[object] = None, - script_actions: Optional[List["ScriptAction"]] = None, - virtual_network_id: Optional[object] = None, - subnet_name: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HDInsightOnDemand' - self.cluster_size = cluster_size - self.time_to_live = time_to_live - self.version = version - self.linked_service_name = linked_service_name - self.host_subscription_id = host_subscription_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.cluster_resource_group = cluster_resource_group - self.cluster_name_prefix = cluster_name_prefix - self.cluster_user_name = cluster_user_name - self.cluster_password = cluster_password - self.cluster_ssh_user_name = cluster_ssh_user_name - self.cluster_ssh_password = cluster_ssh_password - self.additional_linked_service_names = additional_linked_service_names - self.hcatalog_linked_service_name = hcatalog_linked_service_name - self.cluster_type = cluster_type - self.spark_version = spark_version - self.core_configuration = core_configuration - self.h_base_configuration = h_base_configuration - self.hdfs_configuration = hdfs_configuration - self.hive_configuration = hive_configuration - self.map_reduce_configuration = map_reduce_configuration - self.oozie_configuration = oozie_configuration - self.storm_configuration = storm_configuration - self.yarn_configuration = yarn_configuration - self.encrypted_credential = encrypted_credential - self.head_node_size = head_node_size - self.data_node_size = data_node_size - self.zookeeper_node_size = zookeeper_node_size - self.script_actions = script_actions - self.virtual_network_id = virtual_network_id - self.subnet_name = subnet_name + super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'NetezzaTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema -class HDInsightPigActivity(ExecutionActivity): - """HDInsight Pig activity type. +class Notebook(msrest.serialization.Model): + """Notebook. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param description: The description of the notebook. + :type description: str + :param big_data_pool: Big data pool reference. + :type big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference + :param session_properties: Session properties. + :type session_properties: ~azure.synapse.artifacts.models.NotebookSessionProperties + :param metadata: Required. Notebook root-level metadata. 
+ :type metadata: ~azure.synapse.artifacts.models.NotebookMetadata + :param nbformat: Required. Notebook format (major number). Incremented between backwards + incompatible changes to the notebook format. + :type nbformat: int + :param nbformat_minor: Required. Notebook format (minor number). Incremented for backward + compatible changes to the notebook format. + :type nbformat_minor: int + :param cells: Required. Array of cells of the current notebook. + :type cells: list[~azure.synapse.artifacts.models.NotebookCell] + """ + + _validation = { + 'metadata': {'required': True}, + 'nbformat': {'required': True}, + 'nbformat_minor': {'required': True}, + 'cells': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'big_data_pool': {'key': 'bigDataPool', 'type': 'BigDataPoolReference'}, + 'session_properties': {'key': 'sessionProperties', 'type': 'NotebookSessionProperties'}, + 'metadata': {'key': 'metadata', 'type': 'NotebookMetadata'}, + 'nbformat': {'key': 'nbformat', 'type': 'int'}, + 'nbformat_minor': {'key': 'nbformat_minor', 'type': 'int'}, + 'cells': {'key': 'cells', 'type': '[NotebookCell]'}, + } + + def __init__( + self, + *, + metadata: "NotebookMetadata", + nbformat: int, + nbformat_minor: int, + cells: List["NotebookCell"], + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + big_data_pool: Optional["BigDataPoolReference"] = None, + session_properties: Optional["NotebookSessionProperties"] = None, + **kwargs + ): + super(Notebook, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.big_data_pool = big_data_pool + self.session_properties = session_properties + self.metadata = metadata + self.nbformat = nbformat + self.nbformat_minor = nbformat_minor + self.cells = cells + + +class NotebookCell(msrest.serialization.Model): + """Notebook cell. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. Type: array (or Expression - with resultType array). - :type arguments: object - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param script_path: Script path. Type: string (or Expression with resultType string). 
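A sketch of assembling the Notebook model above from cells. NotebookCell is defined just below; NotebookMetadata is assumed here to be constructible with no arguments, which this diff does not show:

    from azure.synapse.artifacts.models import Notebook, NotebookCell, NotebookMetadata

    cell = NotebookCell(
        cell_type="code",
        metadata={},                              # cell-level metadata is an open object
        source=["print('hello from Synapse')"],   # cell contents as an array of lines
    )
    nb = Notebook(
        metadata=NotebookMetadata(),              # assumption: all metadata fields optional
        nbformat=4,
        nbformat_minor=2,
        cells=[cell],
    )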
- :type script_path: object - :param script_linked_service: Script linked service reference. - :type script_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param defines: Allows user to specify defines for Pig job request. - :type defines: dict[str, object] + :param cell_type: Required. String identifying the type of cell. + :type cell_type: str + :param metadata: Required. Cell-level metadata. + :type metadata: object + :param source: Required. Contents of the cell, represented as an array of lines. + :type source: list[str] + :param attachments: Attachments associated with the cell. + :type attachments: object + :param outputs: Cell-level output items. + :type outputs: list[~azure.synapse.artifacts.models.NotebookCellOutputItem] """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + 'cell_type': {'required': True}, + 'metadata': {'required': True}, + 'source': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': 'object'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, - 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'cell_type': {'key': 'cell_type', 'type': 'str'}, + 'metadata': {'key': 'metadata', 'type': 'object'}, + 'source': {'key': 'source', 'type': '[str]'}, + 'attachments': {'key': 'attachments', 'type': 'object'}, + 'outputs': {'key': 'outputs', 'type': '[NotebookCellOutputItem]'}, } def __init__( self, *, - name: str, + cell_type: str, + metadata: object, + source: List[str], additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[object] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - script_path: Optional[object] = None, - script_linked_service: Optional["LinkedServiceReference"] = None, - defines: Optional[Dict[str, object]] = None, + attachments: Optional[object] = None, + outputs: Optional[List["NotebookCellOutputItem"]] = None, **kwargs ): - super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightPig' - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - 
self.script_path = script_path - self.script_linked_service = script_linked_service - self.defines = defines + super(NotebookCell, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.cell_type = cell_type + self.metadata = metadata + self.source = source + self.attachments = attachments + self.outputs = outputs -class HDInsightSparkActivity(ExecutionActivity): - """HDInsight Spark activity. +class NotebookCellOutputItem(msrest.serialization.Model): + """An item of the notebook cell execution output. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: For output_type=stream, determines the name of stream (stdout / stderr). :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param root_path: Required. The root path in 'sparkJobLinkedService' for all the job’s files. - Type: string (or Expression with resultType string). - :type root_path: object - :param entry_file_path: Required. The relative path to the root folder of the code/package to - be executed. Type: string (or Expression with resultType string). - :type entry_file_path: object - :param arguments: The user-specified arguments to HDInsightSparkActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param spark_job_linked_service: The storage linked service for uploading the entry file and - dependencies, and for receiving logs. - :type spark_job_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param class_name: The application's Java/Spark main class. - :type class_name: str - :param proxy_user: The user to impersonate that will execute the job. Type: string (or - Expression with resultType string). - :type proxy_user: object - :param spark_config: Spark configuration property. - :type spark_config: dict[str, object] + :param execution_count: Execution sequence number. + :type execution_count: int + :param output_type: Required. Execution, display, or stream outputs. Possible values include: + "execute_result", "display_data", "stream", "error". + :type output_type: str or ~azure.synapse.artifacts.models.CellOutputType + :param text: For output_type=stream, the stream's text output, represented as a string or an + array of strings. + :type text: object + :param data: Output data. Use MIME type as key, and content as value. + :type data: object + :param metadata: Metadata for the output item. 
+ :type metadata: object """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, - 'root_path': {'required': True}, - 'entry_file_path': {'required': True}, + 'output_type': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, - 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, - 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, - 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, - 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + 'execution_count': {'key': 'execution_count', 'type': 'int'}, + 'output_type': {'key': 'output_type', 'type': 'str'}, + 'text': {'key': 'text', 'type': 'object'}, + 'data': {'key': 'data', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': 'object'}, } def __init__( self, *, - name: str, - root_path: object, - entry_file_path: object, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - arguments: Optional[List[object]] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - spark_job_linked_service: Optional["LinkedServiceReference"] = None, - class_name: Optional[str] = None, - proxy_user: Optional[object] = None, - spark_config: Optional[Dict[str, object]] = None, + output_type: Union[str, "CellOutputType"], + name: Optional[str] = None, + execution_count: Optional[int] = None, + text: Optional[object] = None, + data: Optional[object] = None, + metadata: Optional[object] = None, **kwargs ): - super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightSpark' - self.root_path = root_path - self.entry_file_path = entry_file_path - self.arguments = arguments - self.get_debug_info = get_debug_info - self.spark_job_linked_service = spark_job_linked_service - self.class_name = class_name - self.proxy_user = proxy_user - self.spark_config = spark_config - + super(NotebookCellOutputItem, self).__init__(**kwargs) + self.name = name + self.execution_count = execution_count + self.output_type = output_type + self.text = text + self.data = data + self.metadata = metadata -class HDInsightStreamingActivity(ExecutionActivity): - """HDInsight streaming activity type. - All required parameters must be populated in order to send to Azure. 
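Reviewer note: the two cell models above compose as in this minimal sketch. It assumes the classes are importable from azure.synapse.artifacts.models (consistent with the docstring cross-references in this diff); the literal values are illustrative only, not defaults defined by the SDK.

from azure.synapse.artifacts.models import NotebookCell, NotebookCellOutputItem

# A stream output item: name and text are only meaningful when
# output_type is "stream" (see the NotebookCellOutputItem docstring above).
stdout_item = NotebookCellOutputItem(
    output_type="stream",
    name="stdout",
    text=["hello\n"],
)

# A code cell: cell_type, metadata, and source are the required fields;
# source carries the cell body as a list of lines.
cell = NotebookCell(
    cell_type="code",
    metadata={},
    source=['print("hello")\n'],
    outputs=[stdout_item],
)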
+class NotebookKernelSpec(msrest.serialization.Model): + """Kernel information. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param storage_linked_services: Storage linked service references. - :type storage_linked_services: list[~azure.synapse.artifacts.models.LinkedServiceReference] - :param arguments: User specified arguments to HDInsightActivity. - :type arguments: list[object] - :param get_debug_info: Debug info option. Possible values include: "None", "Always", "Failure". - :type get_debug_info: str or ~azure.synapse.artifacts.models.HDInsightActivityDebugInfoOption - :param mapper: Required. Mapper executable name. Type: string (or Expression with resultType - string). - :type mapper: object - :param reducer: Required. Reducer executable name. Type: string (or Expression with resultType - string). - :type reducer: object - :param input: Required. Input blob path. Type: string (or Expression with resultType string). - :type input: object - :param output: Required. Output blob path. Type: string (or Expression with resultType string). - :type output: object - :param file_paths: Required. Paths to streaming job files. Can be directories. - :type file_paths: list[object] - :param file_linked_service: Linked service reference where the files are located. - :type file_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference - :param combiner: Combiner executable name. Type: string (or Expression with resultType string). - :type combiner: object - :param command_environment: Command line environment values. - :type command_environment: list[object] - :param defines: Allows user to specify defines for streaming job request. - :type defines: dict[str, object] + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Name of the kernel specification. + :type name: str + :param display_name: Required. Name to display in UI. 
+ :type display_name: str """ _validation = { 'name': {'required': True}, - 'type': {'required': True}, - 'mapper': {'required': True}, - 'reducer': {'required': True}, - 'input': {'required': True}, - 'output': {'required': True}, - 'file_paths': {'required': True}, + 'display_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, - 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, - 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, - 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, - 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, - 'input': {'key': 'typeProperties.input', 'type': 'object'}, - 'output': {'key': 'typeProperties.output', 'type': 'object'}, - 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, - 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, - 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, - 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, - 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'display_name': {'key': 'display_name', 'type': 'str'}, } def __init__( self, *, name: str, - mapper: object, - reducer: object, - input: object, - output: object, - file_paths: List[object], + display_name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - storage_linked_services: Optional[List["LinkedServiceReference"]] = None, - arguments: Optional[List[object]] = None, - get_debug_info: Optional[Union[str, "HDInsightActivityDebugInfoOption"]] = None, - file_linked_service: Optional["LinkedServiceReference"] = None, - combiner: Optional[object] = None, - command_environment: Optional[List[object]] = None, - defines: Optional[Dict[str, object]] = None, **kwargs ): - super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'HDInsightStreaming' - self.storage_linked_services = storage_linked_services - self.arguments = arguments - self.get_debug_info = get_debug_info - self.mapper = mapper - self.reducer = reducer - self.input = input - self.output = output - self.file_paths = file_paths - self.file_linked_service = file_linked_service - self.combiner = combiner - self.command_environment = command_environment - self.defines = defines + super(NotebookKernelSpec, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.display_name = display_name -class 
HiveLinkedService(LinkedService): - """Hive Server linked service. +class NotebookLanguageInfo(msrest.serialization.Model): + """Language info. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. IP address or host name of the Hive server, separated by ';' for - multiple hosts (only when serviceDiscoveryMode is enable). - :type host: object - :param port: The TCP port that the Hive server uses to listen for client connections. - :type port: object - :param server_type: The type of Hive server. Possible values include: "HiveServer1", - "HiveServer2", "HiveThriftServer". - :type server_type: str or ~azure.synapse.artifacts.models.HiveServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.synapse.artifacts.models.HiveThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Hive server. - Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.HiveAuthenticationType - :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. - :type service_discovery_mode: object - :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are - added. - :type zoo_keeper_name_space: object - :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts - them into an equivalent form in HiveQL. - :type use_native_query: object - :param username: The user name that you use to access Hive Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param http_path: The partial URL corresponding to the Hive server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. 
- :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param name: Required. The programming language which this kernel runs. + :type name: str + :param codemirror_mode: The codemirror mode to use for code in this language. + :type codemirror_mode: str """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, + 'name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, - 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, - 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'codemirror_mode': {'key': 'codemirror_mode', 'type': 'str'}, } def __init__( self, *, - host: object, - authentication_type: Union[str, "HiveAuthenticationType"], + name: str, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - server_type: Optional[Union[str, "HiveServerType"]] = None, - thrift_transport_protocol: Optional[Union[str, "HiveThriftTransportProtocol"]] = None, - 
service_discovery_mode: Optional[object] = None, - zoo_keeper_name_space: Optional[object] = None, - use_native_query: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - http_path: Optional[object] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + codemirror_mode: Optional[str] = None, **kwargs ): - super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hive' - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.service_discovery_mode = service_discovery_mode - self.zoo_keeper_name_space = zoo_keeper_name_space - self.use_native_query = use_native_query - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + super(NotebookLanguageInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.codemirror_mode = codemirror_mode + + +class NotebookListResponse(msrest.serialization.Model): + """A list of Notebook resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of Notebooks. + :type value: list[~azure.synapse.artifacts.models.NotebookResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[NotebookResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["NotebookResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(NotebookListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class NotebookMetadata(msrest.serialization.Model): + """Notebook root-level metadata. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param kernelspec: Kernel information. + :type kernelspec: ~azure.synapse.artifacts.models.NotebookKernelSpec + :param language_info: Language info. + :type language_info: ~azure.synapse.artifacts.models.NotebookLanguageInfo + """ + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'kernelspec': {'key': 'kernelspec', 'type': 'NotebookKernelSpec'}, + 'language_info': {'key': 'language_info', 'type': 'NotebookLanguageInfo'}, + } -class HiveObjectDataset(Dataset): - """Hive Server dataset. 
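Reviewer note: NotebookListResponse carries an optional nextLink, so callers are expected to loop until it is absent. A hedged paging sketch follows; fetch_page is a hypothetical stand-in for whatever client operation deserializes one page, not a method added by this diff.

def iter_notebooks(fetch_page):
    # fetch_page(link) -> NotebookListResponse; link is None for the first page.
    page = fetch_page(None)
    while True:
        for resource in page.value:   # list[NotebookResource]
            yield resource
        if not page.next_link:        # no further pages remain
            return
        page = fetch_page(page.next_link)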
+ def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + kernelspec: Optional["NotebookKernelSpec"] = None, + language_info: Optional["NotebookLanguageInfo"] = None, + **kwargs + ): + super(NotebookMetadata, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.kernelspec = kernelspec + self.language_info = language_info + + +class NotebookResource(AzureEntityResource): + """Notebook resource type. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Hive. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Hive. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of Notebook. 
+ :type properties: ~azure.synapse.artifacts.models.Notebook """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Notebook'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + properties: "Notebook", **kwargs ): - super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HiveObject' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema + super(NotebookResource, self).__init__(**kwargs) + self.properties = properties -class HttpLinkedService(LinkedService): - """Linked service for an HTTP source. +class NotebookSessionProperties(msrest.serialization.Model): + """Session properties. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: - string (or Expression with resultType string). 
- :type url: object - :param authentication_type: The authentication type to be used to connect to the HTTP server. - Possible values include: "Basic", "Anonymous", "Digest", "Windows", "ClientCertificate". - :type authentication_type: str or ~azure.synapse.artifacts.models.HttpAuthenticationType - :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or - Expression with resultType string). - :type user_name: object - :param password: Password for Basic, Digest, Windows, or ClientCertificate with - EmbeddedCertData authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate - authentication. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type embedded_cert_data: object - :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only - valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either - CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression - with resultType string). - :type cert_thumbprint: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param enable_server_certificate_validation: If true, validate the HTTPS server SSL - certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - :type enable_server_certificate_validation: object + :param driver_memory: Required. Amount of memory to use for the driver process. + :type driver_memory: str + :param driver_cores: Required. Number of cores to use for the driver. + :type driver_cores: int + :param executor_memory: Required. Amount of memory to use per executor process. + :type executor_memory: str + :param executor_cores: Required. Number of cores to use for each executor. + :type executor_cores: int + :param num_executors: Required. Number of executors to launch for this session. 
+ :type num_executors: int """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + 'driver_memory': {'required': True}, + 'driver_cores': {'required': True}, + 'executor_memory': {'required': True}, + 'executor_cores': {'required': True}, + 'num_executors': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, - 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, } def __init__( self, *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[Union[str, "HttpAuthenticationType"]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - embedded_cert_data: Optional[object] = None, - cert_thumbprint: Optional[object] = None, - encrypted_credential: Optional[object] = None, - enable_server_certificate_validation: Optional[object] = None, + driver_memory: str, + driver_cores: int, + executor_memory: str, + executor_cores: int, + num_executors: int, **kwargs ): - super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'HttpServer' - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.embedded_cert_data = embedded_cert_data - self.cert_thumbprint = cert_thumbprint - self.encrypted_credential = encrypted_credential - self.enable_server_certificate_validation = enable_server_certificate_validation + super(NotebookSessionProperties, self).__init__(**kwargs) + self.driver_memory = driver_memory + self.driver_cores = driver_cores + self.executor_memory = executor_memory + self.executor_cores = executor_cores + self.num_executors = num_executors -class HubspotLinkedService(LinkedService): - """Hubspot Service linked service. +class ODataLinkedService(LinkedService): + """Open Data Protocol (OData) linked service. All required parameters must be populated in order to send to Azure. 
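Reviewer note: with the full notebook model family now in place, the pieces assemble as below. A minimal sketch under the same azure.synapse.artifacts.models import-path assumption; the kernel and language names are illustrative values.

from azure.synapse.artifacts.models import (
    Notebook,
    NotebookCell,
    NotebookKernelSpec,
    NotebookLanguageInfo,
    NotebookMetadata,
)

metadata = NotebookMetadata(
    kernelspec=NotebookKernelSpec(name="python3", display_name="Python 3"),
    language_info=NotebookLanguageInfo(name="python", codemirror_mode="ipython"),
)

# All four fields flagged as required in Notebook._validation are populated.
notebook = Notebook(
    metadata=metadata,
    nbformat=4,        # major notebook format number
    nbformat_minor=2,  # minor notebook format number
    cells=[NotebookCell(cell_type="code", metadata={}, source=["1 + 1\n"])],
)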
@@ -11407,26 +22267,42 @@ class HubspotLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param client_id: Required. The client ID associated with your Hubspot application. - :type client_id: object - :param client_secret: The client secret associated with your Hubspot application. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param access_token: The access token obtained when initially authenticating your OAuth - integration. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param refresh_token: The refresh token obtained when initially authenticating your OAuth - integration. - :type refresh_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the OData service. + Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", + "ManagedServiceIdentity". + :type authentication_type: str or ~azure.synapse.artifacts.models.ODataAuthenticationType + :param user_name: User name of the OData service. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password of the OData service. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param tenant: Specify the tenant information (domain name or tenant ID) under which your + application resides. Type: string (or Expression with resultType string). + :type tenant: object + :param service_principal_id: Specify the application id of your application registered in Azure + Active Directory. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param aad_resource_id: Specify the resource you are requesting authorization to use. + Type: string (or Expression with resultType string). + :type aad_resource_id: object + :param aad_service_principal_credential_type: Specify the credential type (key or cert) used + for the service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". + :type aad_service_principal_credential_type: str or + ~azure.synapse.artifacts.models.ODataAadServicePrincipalCredentialType + :param service_principal_key: Specify the secret of your application registered in Azure Active + Directory. Type: string (or Expression with resultType string). + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_embedded_cert: Specify the base64 encoded certificate of your + application registered in Azure Active Directory. Type: string (or Expression with resultType + string).
+ :type service_principal_embedded_cert: ~azure.synapse.artifacts.models.SecretBase + :param service_principal_embedded_cert_password: Specify the password of your certificate if + your certificate has a password and you are using AadServicePrincipal authentication. Type: + string (or Expression with resultType string). + :type service_principal_embedded_cert_password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -11435,7 +22311,7 @@ class HubspotLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, + 'url': {'required': True}, } _attribute_map = { @@ -11445,48 +22321,60 @@ class HubspotLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - client_id: object, + url: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - client_secret: Optional["SecretBase"] = None, - access_token: Optional["SecretBase"] = None, - refresh_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + authentication_type: Optional[Union[str, "ODataAuthenticationType"]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + 
service_principal_id: Optional[object] = None, + aad_resource_id: Optional[object] = None, + aad_service_principal_credential_type: Optional[Union[str, "ODataAadServicePrincipalCredentialType"]] = None, + service_principal_key: Optional["SecretBase"] = None, + service_principal_embedded_cert: Optional["SecretBase"] = None, + service_principal_embedded_cert_password: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Hubspot' - self.client_id = client_id - self.client_secret = client_secret - self.access_token = access_token - self.refresh_token = refresh_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification + super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'OData' # type: str + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password self.encrypted_credential = encrypted_credential -class HubspotObjectDataset(Dataset): - """Hubspot Service dataset. +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. All required parameters must be populated in order to send to Azure. @@ -11512,8 +22400,8 @@ class HubspotObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param path: The OData resource path. Type: string (or Expression with resultType string). 
+ :type path: object """ _validation = { @@ -11531,7 +22419,7 @@ class HubspotObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__( @@ -11545,200 +22433,271 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + path: Optional[object] = None, **kwargs ): - super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'HubspotObject' - self.table_name = table_name + super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ODataResource' # type: str + self.path = path -class IfConditionActivity(Activity): - """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. +class ODataSource(CopySource): + """A copy activity source for OData source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ODataSource' # type: str + self.query = query + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param expression: Required. An expression that would evaluate to Boolean. This is used to - determine the block of activities (ifTrueActivities or ifFalseActivities) that will be - executed. - :type expression: ~azure.synapse.artifacts.models.Expression - :param if_true_activities: List of activities to execute if expression is evaluated to true. - This is an optional property and if not provided, the activity will exit without any action. - :type if_true_activities: list[~azure.synapse.artifacts.models.Activity] - :param if_false_activities: List of activities to execute if expression is evaluated to false. - This is an optional property and if not provided, the activity will exit without any action. - :type if_false_activities: list[~azure.synapse.artifacts.models.Activity] + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The non-access credential portion of the connection string + as well as an optional encrypted credential. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the ODBC data store. + Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). 
+ :type authentication_type: object + :param credential: The access credential portion of the connection string specified in driver- + specific property-value format. + :type credential: ~azure.synapse.artifacts.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, + credential: Optional["SecretBase"] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Odbc' # type: str + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + + +class OdbcSink(CopySink): + """A copy activity ODBC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: A query to execute before starting the copy. Type: string (or + Expression with resultType string). + :type pre_copy_script: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'expression': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, - 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, - 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - name: str, - expression: "Expression", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - if_true_activities: Optional[List["Activity"]] = None, - if_false_activities: Optional[List["Activity"]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, **kwargs ): - super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'IfCondition' - self.expression = expression - self.if_true_activities = if_true_activities - self.if_false_activities = if_false_activities + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OdbcSink' # type: str + self.pre_copy_script = pre_copy_script -class ImpalaLinkedService(LinkedService): - """Impala server linked service. +class OdbcSource(TabularSource): + """A copy activity source for ODBC databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. 
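And a matching sketch for the `OdbcSink` just defined; all values below are illustrative:

```python
from azure.synapse.artifacts.models import OdbcSink

sink = OdbcSink(
    pre_copy_script="TRUNCATE TABLE staging.orders",  # runs before the copy starts
    write_batch_size=1000,
    write_batch_timeout="00:05:00",  # follows the documented timespan pattern
)
```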
:type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Impala server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Impala server uses to listen for client connections. The - default value is 21050. - :type port: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Anonymous", "SASLUsername", "UsernameAndPassword". - :type authentication_type: str or ~azure.synapse.artifacts.models.ImpalaAuthenticationType - :param username: The user name used to access the Impala server. The default value is anonymous - when using SASLUsername. - :type username: object - :param password: The password corresponding to the user name when using UsernameAndPassword. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - host: object, - authentication_type: Union[str, "ImpalaAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Impala' - self.host = host - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - 
self.encrypted_credential = encrypted_credential + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'OdbcSource' # type: str + self.query = query -class ImpalaObjectDataset(Dataset): - """Impala server dataset. +class OdbcTableDataset(Dataset): + """The ODBC table dataset. All required parameters must be populated in order to send to Azure. @@ -11764,15 +22723,8 @@ class ImpalaObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. + :param table_name: The ODBC table name. Type: string (or Expression with resultType string). :type table_name: object - :param table: The table name of the Impala. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Impala. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -11791,8 +22743,6 @@ class ImpalaObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( @@ -11807,215 +22757,255 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ImpalaObject' + super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'OdbcTable' # type: str self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema -class InformixLinkedService(LinkedService): - """Informix linked service. +class Office365Dataset(Dataset): + """The Office365 account. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. 
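The `OdbcSource`/`OdbcTableDataset` pair completed above is typically wired together through a linked-service reference; a hedged sketch with placeholder names:

```python
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    OdbcSource,
    OdbcTableDataset,
)

dataset = OdbcTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyOdbcLinkedService"),
    table_name="dbo.orders",  # plain string or an Expression payload
)
source = OdbcSource(
    query="SELECT * FROM dbo.orders",  # illustrative database query
    query_timeout="02:00:00",
)
```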
+ :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Informix as ODBC data - store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType - string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or + Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the specific rows to + extract from Office 365. Type: string (or Expression with resultType string). 
+ :type predicate: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, } def __init__( self, *, - connection_string: object, + linked_service_name: "LinkedServiceReference", + table_name: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + predicate: Optional[object] = None, **kwargs ): - super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Informix' - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Office365Table' # type: str + self.table_name = table_name + self.predicate = predicate -class InformixTableDataset(Dataset): - """The Informix table dataset. +class Office365LinkedService(LinkedService): + """Office365 linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. 
Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Informix table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. + Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant information under which your + Azure AD web application resides. Type: string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client ID. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
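For the `Office365Dataset` finished above, both `linked_service_name` and `table_name` are required; the dataset name and predicate below are illustrative:

```python
from azure.synapse.artifacts.models import LinkedServiceReference, Office365Dataset

o365_dataset = Office365Dataset(
    linked_service_name=LinkedServiceReference(reference_name="MyOffice365LinkedService"),
    table_name="BasicDataSet_v0.Message_v0",               # required; illustrative name
    predicate="CreatedDateTime ge 2020-01-01T00:00:00Z",   # optional row filter
)
```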
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + office365_tenant_id: object, + service_principal_tenant_id: object, + service_principal_id: object, + service_principal_key: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'InformixTable' - self.table_name = table_name - + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Office365' # type: str + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential -class IntegrationRuntimeReference(msrest.serialization.Model): - """Integration runtime reference type. - Variables are only populated by the server, and will be ignored when sending a request. +class Office365Source(CopySource): + """A copy activity source for an Office 365 service. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Type of integration runtime. Default value: - "IntegrationRuntimeReference". - :vartype type: str - :param reference_name: Required. 
Reference integration runtime name. - :type reference_name: str - :param parameters: Arguments for integration runtime. - :type parameters: dict[str, object] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param allowed_groups: The groups containing all the users. Type: array of strings (or + Expression with resultType array of strings). + :type allowed_groups: object + :param user_scope_filter_uri: The user scope uri. Type: string (or Expression with resultType + string). + :type user_scope_filter_uri: object + :param date_filter_column: The column on which to apply the start time and + end time filters. Type: string (or Expression with resultType string). + :type date_filter_column: object + :param start_time: Start time of the requested range for this dataset. Type: string (or + Expression with resultType string). + :type start_time: object + :param end_time: End time of the requested range for this dataset. Type: string (or Expression + with resultType string). + :type end_time: object + :param output_columns: The columns to be read out from the Office 365 table. Type: array of + objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { + "name": "CreatedDateTime" } ].
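All four identity fields on the `Office365LinkedService` above are required; a sketch with placeholder IDs, again assuming the `SecureString` secret type:

```python
from azure.synapse.artifacts.models import Office365LinkedService, SecureString

o365_ls = Office365LinkedService(
    office365_tenant_id="<office-365-tenant-id>",
    service_principal_tenant_id="<aad-tenant-id>",
    service_principal_id="<application-client-id>",
    service_principal_key=SecureString(value="<application-secret>"),  # assumed type
)
```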
+ :type output_columns: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, + 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, + 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, + 'start_time': {'key': 'startTime', 'type': 'object'}, + 'end_time': {'key': 'endTime', 'type': 'object'}, + 'output_columns': {'key': 'outputColumns', 'type': 'object'}, } - type = "IntegrationRuntimeReference" - def __init__( self, *, - reference_name: str, - parameters: Optional[Dict[str, object]] = None, - **kwargs - ): - super(IntegrationRuntimeReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + allowed_groups: Optional[object] = None, + user_scope_filter_uri: Optional[object] = None, + date_filter_column: Optional[object] = None, + start_time: Optional[object] = None, + end_time: Optional[object] = None, + output_columns: Optional[object] = None, + **kwargs + ): + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'Office365Source' # type: str + self.allowed_groups = allowed_groups + self.user_scope_filter_uri = user_scope_filter_uri + self.date_filter_column = date_filter_column + self.start_time = start_time + self.end_time = end_time + self.output_columns = output_columns -class JiraLinkedService(LinkedService): - """Jira Service linked service. +class OracleLinkedService(LinkedService): + """Oracle database. All required parameters must be populated in order to send to Azure. @@ -12032,27 +23022,11 @@ class JiraLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The IP address or host name of the Jira service. (e.g. - jira.example.com). - :type host: object - :param port: The TCP port that the Jira server uses to listen for client connections. The - default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. - :type port: object - :param username: Required. The user name that you use to access Jira Service. - :type username: object - :param password: The password corresponding to the user name that you provided in the username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. 
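A sketch of the `Office365Source` completed above, reusing the `output_columns` example from its own docstring; the group and column names are illustrative:

```python
from azure.synapse.artifacts.models import Office365Source

source = Office365Source(
    allowed_groups=["finance@contoso.com"],   # illustrative group
    date_filter_column="CreatedDateTime",
    start_time="2020-01-01T00:00:00Z",
    end_time="2020-02-01T00:00:00Z",
    output_columns=[{"name": "Id"}, {"name": "CreatedDateTime"}],
)
```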
- :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -12061,8 +23035,7 @@ class JiraLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { @@ -12072,116 +23045,167 @@ class JiraLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - host: object, - username: object, + connection_string: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - port: Optional[object] = None, - password: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Jira' - self.host = host - self.port = port - self.username = username + super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Oracle' # type: str + self.connection_string = connection_string 
self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential -class JiraObjectDataset(Dataset): - """Jira Service dataset. +class OraclePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Oracle source partitioning. + + :param partition_names: Names of the physical partitions of Oracle table. + :type partition_names: object + :param partition_column_name: The name of the column in integer type that will be used for + proceeding range partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': 'object'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_names: Optional[object] = None, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + +class OracleServiceCloudLinkedService(LinkedService): + """Oracle Service Cloud linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. 
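The `OracleLinkedService` above pairs naturally with an Azure Key Vault secret for the password. The `store`/`secret_name` keywords on `AzureKeyVaultSecretReference` are assumed from sibling generated models and are not shown in this diff:

```python
from azure.synapse.artifacts.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    OracleLinkedService,
)

oracle_ls = OracleLinkedService(
    connection_string="Host=ora01;Port=1521;Sid=ORCL;User Id=copy_user;",  # required
    password=AzureKeyVaultSecretReference(          # assumed constructor shape
        store=LinkedServiceReference(reference_name="MyKeyVaultLinkedService"),
        secret_name="oracle-password",
    ),
)
```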
:type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param host: Required. The URL of the Oracle Service Cloud instance. + :type host: object + :param username: Required. The user name that you use to access Oracle Service Cloud server. + :type username: object + :param password: Required. The password corresponding to the user name that you provided in the + username key. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + host: object, + username: object, + password: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'JiraObject' - self.table_name = table_name + super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'OracleServiceCloud' # type: str + self.host = host + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential -class JsonDataset(Dataset): - """Json dataset. +class OracleServiceCloudObjectDataset(Dataset): + """Oracle Service Cloud dataset. All required parameters must be populated in order to send to Azure. @@ -12207,16 +23231,8 @@ class JsonDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. 
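`host`, `username`, and `password` are all required on the `OracleServiceCloudLinkedService` just completed; a placeholder sketch, again assuming the `SecureString` secret type:

```python
from azure.synapse.artifacts.models import (
    OracleServiceCloudLinkedService,
    SecureString,
)

osc_ls = OracleServiceCloudLinkedService(
    host="https://mysite.example.com",             # required; illustrative URL
    username="integration_user",                   # required
    password=SecureString(value="<placeholder>"),  # required SecretBase
    use_encrypted_endpoints=True,
)
```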
:type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the json data storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param encoding_name: The code page name of the preferred encoding. If not specified, the - default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column - of the table in the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with - resultType string). - :type encoding_name: object - :param compression: The data compression method used for the json dataset. - :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object """ _validation = { @@ -12234,9 +23250,7 @@ class JsonDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( @@ -12250,365 +23264,285 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - encoding_name: Optional[object] = None, - compression: Optional["DatasetCompression"] = None, - **kwargs - ): - super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Json' - self.location = location - self.encoding_name = encoding_name - self.compression = compression - - -class LinkedServiceDebugResource(SubResourceDebugResource): - """Linked service debug resource. - - All required parameters must be populated in order to send to Azure. - - :param name: The resource name. - :type name: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService - """ - - _validation = { - 'properties': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, - } - - def __init__( - self, - *, - properties: "LinkedService", - name: Optional[str] = None, - **kwargs - ): - super(LinkedServiceDebugResource, self).__init__(name=name, **kwargs) - self.properties = properties - - -class LinkedServiceListResponse(msrest.serialization.Model): - """A list of linked service resources. - - All required parameters must be populated in order to send to Azure. - - :param value: Required. List of linked services. - :type value: list[~azure.synapse.artifacts.models.LinkedServiceResource] - :param next_link: The link to the next page of results, if any remaining results exist. 
- :type next_link: str - """ - - _validation = { - 'value': {'required': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[LinkedServiceResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - *, - value: List["LinkedServiceResource"], - next_link: Optional[str] = None, - **kwargs - ): - super(LinkedServiceListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class LinkedServiceReference(msrest.serialization.Model): - """Linked service reference type. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Required. Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] - """ - - _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, - } - - type = "LinkedServiceReference" - - def __init__( - self, - *, - reference_name: str, - parameters: Optional[Dict[str, object]] = None, + table_name: Optional[object] = None, **kwargs ): - super(LinkedServiceReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters - + super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'OracleServiceCloudObject' # type: str + self.table_name = table_name -class LinkedServiceResource(SubResource): - """Linked service resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class OracleServiceCloudSource(TabularSource): + """A copy activity Oracle Service Cloud source. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
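A sketch of the `OracleServiceCloudObjectDataset` completed above; only the linked-service reference is required:

```python
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    OracleServiceCloudObjectDataset,
)

osc_dataset = OracleServiceCloudObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name="MyOracleServiceCloud"),
    table_name="Contacts",  # optional; illustrative object name
)
```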
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'LinkedService'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - properties: "LinkedService", + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(LinkedServiceResource, self).__init__(**kwargs) - self.properties = properties + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'OracleServiceCloudSource' # type: str + self.query = query -class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. +class OracleSink(CopySink): + """A copy activity Oracle sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param linked_service_name: Required. Log storage linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param path: The path to storage for storing detailed logs of activity execution. Type: string - (or Expression with resultType string). - :type path: object + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). 
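And its companion `OracleServiceCloudSource`, on which only the server-filled `type` is required; values are illustrative:

```python
from azure.synapse.artifacts.models import OracleServiceCloudSource

source = OracleServiceCloudSource(
    query="SELECT * FROM Contacts",  # illustrative query
    query_timeout="01:00:00",        # timespan-patterned string per the docstring
)
```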
+ :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object """ _validation = { - 'linked_service_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'path': {'key': 'path', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - path: Optional[object] = None, - **kwargs - ): - super(LogStorageSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + **kwargs + ): + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OracleSink' # type: str + self.pre_copy_script = pre_copy_script -class LookupActivity(ExecutionActivity): - """Lookup activity. +class OracleSource(CopySource): + """A copy activity Oracle source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param source: Required. Dataset-specific source properties, same as copy activity source. - :type source: ~azure.synapse.artifacts.models.CopySource - :param dataset: Required. Lookup activity dataset reference. - :type dataset: ~azure.synapse.artifacts.models.DatasetReference - :param first_row_only: Whether to return first row or all rows. Default value is true. Type: - boolean (or Expression with resultType boolean). 
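A minimal `OracleSink`, as completed above; every field besides the server-filled `type` is optional:

```python
from azure.synapse.artifacts.models import OracleSink

sink = OracleSink(
    pre_copy_script="TRUNCATE TABLE STAGING.ORDERS",  # SQL run before the copy
    write_batch_size=10000,
    max_concurrent_connections=4,
)
```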
- :type first_row_only: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType + string). + :type oracle_reader_query: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param partition_option: The partition mechanism that will be used for Oracle read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.OraclePartitionOption + :param partition_settings: The settings that will be leveraged for Oracle source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.OraclePartitionSettings """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'source': {'required': True}, - 'dataset': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, - 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, - 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__( self, *, - name: str, - source: "CopySource", - dataset: "DatasetReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - first_row_only: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + oracle_reader_query: Optional[object] = None, + query_timeout: Optional[object] = None, + partition_option: Optional[Union[str, "OraclePartitionOption"]] = None, + partition_settings: 
Optional["OraclePartitionSettings"] = None, **kwargs ): - super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'Lookup' - self.source = source - self.dataset = dataset - self.first_row_only = first_row_only + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OracleSource' # type: str + self.oracle_reader_query = oracle_reader_query + self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings -class MagentoLinkedService(LinkedService): - """Magento server linked service. +class OracleTableDataset(Dataset): + """The on-premises Oracle database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param host: Required. The URL of the Magento instance. (i.e. 192.168.222.110/magento3). - :type host: object - :param access_token: The access token from Magento. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. 
Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: + string (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the on-premises Oracle database. Type: string (or Expression + with resultType string). + :type table: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( self, *, - host: object, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - access_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Magento' - self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - 
self.encrypted_credential = encrypted_credential + super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'OracleTable' # type: str + self.table_name = table_name + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class MagentoObjectDataset(Dataset): - """Magento server dataset. +class OrcDataset(Dataset): + """ORC dataset. All required parameters must be populated in order to send to Azure. @@ -12634,8 +23568,10 @@ class MagentoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param location: The location of the ORC data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec """ _validation = { @@ -12653,50 +23589,152 @@ class MagentoObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None, + **kwargs + ): + super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Orc' # type: str + self.location = location + self.orc_compression_codec = orc_compression_codec + + +class OrcFormat(DatasetStorageFormat): + """The data stored in Optimized Row Columnar (ORC) format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). 
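To show how the schema/table pair supersedes the retired table_name on OracleTableDataset, a hedged sketch; LinkedServiceReference is generated elsewhere in this package and the reference name is hypothetical.

from azure.synapse.artifacts.models import LinkedServiceReference, OracleTableDataset

oracle_table = OracleTableDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyOracleLinkedService"
    ),
    schema_type_properties_schema="HR",  # serialized as typeProperties.schema
    table="EMPLOYEES",                   # serialized as typeProperties.table
)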
+ :type deserializer: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + **kwargs + ): + super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'OrcFormat' # type: str + + +class OrcSink(CopySink): + """A copy activity ORC sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. 
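Putting the OrcDataset defined above together with a storage location; AzureBlobStorageLocation is one of the DatasetLocation subtypes generated in this file, and the container and path are invented.

from azure.synapse.artifacts.models import (
    AzureBlobStorageLocation, LinkedServiceReference, OrcDataset,
)

orc_dataset = OrcDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyBlobStorage"
    ),
    location=AzureBlobStorageLocation(container="data", folder_path="curated/orc"),
    orc_compression_codec="snappy",  # "none", "zlib" or "snappy"
)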
+ :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MagentoObject' - self.table_name = table_name + super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OrcSink' # type: str + self.store_settings = store_settings -class MappingDataFlow(DataFlow): - """Mapping data flow. +class OrcSource(CopySource): + """A copy activity ORC source. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of data flow.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: The description of the data flow. - :type description: str - :param annotations: List of tags that can be used for describing the data flow. - :type annotations: list[object] - :param folder: The folder that this data flow is in. If not specified, Data flow will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.DataFlowFolder - :param sources: List of sources in data flow. - :type sources: list[~azure.synapse.artifacts.models.DataFlowSource] - :param sinks: List of sinks in data flow. - :type sinks: list[~azure.synapse.artifacts.models.DataFlowSink] - :param transformations: List of transformations in data flow. - :type transformations: list[~azure.synapse.artifacts.models.Transformation] - :param script: DataFlow script. - :type script: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
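The ORC sink pairs with a StoreWriteSettings subtype; a sketch assuming the AzureBlobStorageWriteSettings model generated in this same module.

from azure.synapse.artifacts.models import AzureBlobStorageWriteSettings, OrcSink

orc_sink = OrcSink(
    store_settings=AzureBlobStorageWriteSettings(max_concurrent_connections=4),
    write_batch_timeout="00:10:00",
)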
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: ORC store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { @@ -12704,63 +23742,39 @@ class MappingDataFlow(DataFlow): } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, - 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, - 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, - 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, - 'script': {'key': 'typeProperties.script', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, *, - description: Optional[str] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DataFlowFolder"] = None, - sources: Optional[List["DataFlowSource"]] = None, - sinks: Optional[List["DataFlowSink"]] = None, - transformations: Optional[List["Transformation"]] = None, - script: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MappingDataFlow' - self.sources = sources - self.sinks = sinks - self.transformations = transformations - self.script = script + super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'OrcSource' # type: str + self.store_settings = store_settings -class MariaDBLinkedService(LinkedService): - """MariaDB server linked service. +class ParameterSpecification(msrest.serialization.Model): + """Definition of a single parameter for an entity. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
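And the matching read side for OrcSource, assuming the blob read settings generated in this module; the wildcard is illustrative.

from azure.synapse.artifacts.models import AzureBlobStorageReadSettings, OrcSource

orc_source = OrcSource(
    store_settings=AzureBlobStorageReadSettings(
        recursive=True, wildcard_file_name="*.orc"
    ),
)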
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", + "Float", "Bool", "Array", "SecureString". + :type type: str or ~azure.synapse.artifacts.models.ParameterType + :param default_value: Default value of parameter. + :type default_value: object """ _validation = { @@ -12768,39 +23782,24 @@ class MariaDBLinkedService(LinkedService): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + type: Union[str, "ParameterType"], + default_value: Optional[object] = None, **kwargs ): - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MariaDB' - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential + super(ParameterSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value -class MariaDBTableDataset(Dataset): - """MariaDB server dataset. +class ParquetDataset(Dataset): + """Parquet dataset. All required parameters must be populated in order to send to Azure. @@ -12826,8 +23825,10 @@ class MariaDBTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param location: The location of the parquet storage. 
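ParameterSpecification above is how datasets, pipelines and linked services declare their parameters; a small sketch with invented parameter names.

from azure.synapse.artifacts.models import ParameterSpecification

parameters = {
    "schemaName": ParameterSpecification(type="String", default_value="dbo"),
    "retryCount": ParameterSpecification(type="Int", default_value=3),
}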
+ :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". + :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec """ _validation = { @@ -12845,7 +23846,8 @@ class MariaDBTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, } def __init__( @@ -12859,174 +23861,169 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + location: Optional["DatasetLocation"] = None, + compression_codec: Optional[Union[str, "ParquetCompressionCodec"]] = None, **kwargs ): - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MariaDBTable' - self.table_name = table_name + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Parquet' # type: str + self.location = location + self.compression_codec = compression_codec -class MarketoLinkedService(LinkedService): - """Marketo server linked service. +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage format.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com). - :type endpoint: object - :param client_id: Required. The client Id of your Marketo service. - :type client_id: object - :param client_secret: The client secret of your Marketo service. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. 
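ParquetDataset mirrors OrcDataset but with its own codec enum and serialization key; placeholders as before.

from azure.synapse.artifacts.models import (
    AzureBlobStorageLocation, LinkedServiceReference, ParquetDataset,
)

parquet_dataset = ParquetDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyBlobStorage"
    ),
    location=AzureBlobStorageLocation(container="data", folder_path="curated/parquet"),
    compression_codec="gzip",  # serialized as typeProperties.compressionCodec
)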
The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, } def __init__( self, *, - endpoint: object, - client_id: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + **kwargs + ): + super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'ParquetFormat' # type: str + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Marketo' - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ParquetSink' # type: str + self.store_settings = store_settings -class MarketoObjectDataset(Dataset): - """Marketo server dataset. +class ParquetSource(CopySource): + """A copy activity Parquet source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. 
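ParquetSink carries the same CopySink retry knobs plus store settings; a short sketch under the same assumptions as the ORC example.

from azure.synapse.artifacts.models import AzureBlobStorageWriteSettings, ParquetSink

parquet_sink = ParquetSink(
    store_settings=AzureBlobStorageWriteSettings(copy_behavior="PreserveHierarchy"),
    sink_retry_count=2,
    sink_retry_wait="00:01:00",
)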
- :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Parquet store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): - super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MarketoObject' - self.table_name = table_name + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, 
max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ParquetSource' # type: str + self.store_settings = store_settings -class MicrosoftAccessLinkedService(LinkedService): - """Microsoft Access linked service. +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. All required parameters must be populated in order to send to Azure. @@ -13043,22 +24040,22 @@ class MicrosoftAccessLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the Microsoft Access as - ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with - resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). + :type host: object + :param client_id: Required. The client ID associated with your PayPal application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal application. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
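To see where these sources and sinks plug in, a hedged end-to-end sketch wiring the Parquet pair into a CopyActivity; CopyActivity and DatasetReference are generated earlier in this file, and the dataset reference names are hypothetical.

from azure.synapse.artifacts.models import (
    CopyActivity, DatasetReference, ParquetSink, ParquetSource,
)

copy_step = CopyActivity(
    name="CopyRawToCurated",
    inputs=[DatasetReference(type="DatasetReference", reference_name="RawParquet")],
    outputs=[DatasetReference(type="DatasetReference", reference_name="CuratedParquet")],
    source=ParquetSource(),
    sink=ParquetSink(write_batch_timeout="00:05:00"),
)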
@@ -13067,7 +24064,8 @@ class MicrosoftAccessLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -13077,42 +24075,45 @@ class MicrosoftAccessLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - connection_string: object, + host: object, + client_id: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, + client_secret: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MicrosoftAccess' - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password + super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Paypal' # type: str + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential -class MicrosoftAccessTableDataset(Dataset): - """The Microsoft Access table dataset. +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. All required parameters must be populated in order to send to Azure. 
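The PayPal linked service above requires host and client_id; the secret goes through a SecretBase subtype such as SecureString, generated elsewhere in this file. Values are placeholders.

from azure.synapse.artifacts.models import PaypalLinkedService, SecureString

paypal_ls = PaypalLinkedService(
    host="api.sandbox.paypal.com",
    client_id="<client-id>",
    client_secret=SecureString(value="<client-secret>"),  # a Key Vault reference also works
)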
@@ -13138,8 +24139,7 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -13175,83 +24175,70 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MicrosoftAccessTable' + super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'PaypalObject' # type: str self.table_name = table_name -class MongoDbCollectionDataset(Dataset): - """The MongoDB database dataset. +class PaypalSource(TabularSource): + """A copy activity Paypal Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection_name: Required. The table name of the MongoDB database. Type: string (or - Expression with resultType string). - :type collection_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - collection_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MongoDbCollection' - self.collection_name = collection_name + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PaypalSource' # type: str + self.query = query -class MongoDbLinkedService(LinkedService): - """Linked service for MongoDb data source. +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. All required parameters must be populated in order to send to Azure. @@ -13268,31 +24255,39 @@ class MongoDbLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. The IP address or server name of the MongoDB server. Type: string (or - Expression with resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect to the MongoDB - database. Possible values include: "Basic", "Anonymous". 
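PaypalSource inherits query_timeout from TabularSource, so a query-driven read looks like the sketch below; the object name in the query is invented.

from azure.synapse.artifacts.models import PaypalSource

paypal_source = PaypalSource(
    query="SELECT * FROM Payment_Experience",
    query_timeout="00:10:00",
)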
- :type authentication_type: str or ~azure.synapse.artifacts.models.MongoDbAuthenticationType - :param database_name: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database_name: object - :param username: Username for authentication. Type: string (or Expression with resultType - string). + :param host: Required. The IP address or host name of the Phoenix server. (i.e. + 192.168.222.160). + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for client connections. The + default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. (i.e. + /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using + WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to connect to the + Phoenix server. Possible values include: "Anonymous", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. :type username: object - :param password: Password for authentication. + :param password: The password corresponding to the user name. :type password: ~azure.synapse.artifacts.models.SecretBase - :param auth_source: Database to verify the username and password. Type: string (or Expression - with resultType string). - :type auth_source: object - :param port: The TCP port number that the MongoDB server uses to listen for client connections. - The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. Type: boolean (or Expression with resultType boolean). + default value is false. :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. Type: boolean (or Expression with resultType boolean). + the server. The default value is false. :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with @@ -13302,8 +24297,8 @@ class MongoDbLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database_name': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -13313,14 +24308,16 @@ class MongoDbLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -13328,39 +24325,43 @@ class MongoDbLinkedService(LinkedService): def __init__( self, *, - server: object, - database_name: object, + host: object, + authentication_type: Union[str, "PhoenixAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[Union[str, "MongoDbAuthenticationType"]] = None, + port: Optional[object] = None, + http_path: Optional[object] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, - auth_source: Optional[object] = None, - port: Optional[object] = None, enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, allow_self_signed_server_cert: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MongoDb' - self.server = server + super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Phoenix' # type: str + self.host = host + self.port = port + self.http_path = http_path self.authentication_type = authentication_type - self.database_name = database_name self.username = username self.password = 
password - self.auth_source = auth_source - self.port = port self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch self.allow_self_signed_server_cert = allow_self_signed_server_cert self.encrypted_credential = encrypted_credential -class MongoDbV2CollectionDataset(Dataset): - """The MongoDB database dataset. +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. All required parameters must be populated in order to send to Azure. @@ -13386,15 +24387,20 @@ class MongoDbV2CollectionDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param collection: Required. The collection name of the MongoDB database. Type: string (or + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Phoenix. Type: string (or Expression with resultType + string). + :type table: object + :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or Expression with resultType string). - :type collection: object + :type schema_type_properties_schema: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'collection': {'required': True}, } _attribute_map = { @@ -13407,14 +24413,15 @@ class MongoDbV2CollectionDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - collection: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -13422,1069 +24429,1104 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MongoDbV2Collection' - self.collection = collection + super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'PhoenixObject' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema -class MongoDbV2LinkedService(LinkedService): - """Linked service for MongoDB data source. 
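For orientation, a hedged usage sketch of the Phoenix models regenerated above; it is illustrative only and not part of the generated code. It assumes azure-synapse-artifacts exposes PhoenixLinkedService, PhoenixObjectDataset, LinkedServiceReference, and SecureString as generated here, and every literal value below is a placeholder.

```python
# Hypothetical usage sketch for the regenerated Phoenix models (placeholders throughout).
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    PhoenixLinkedService,
    PhoenixObjectDataset,
    SecureString,
)

# Linked service: host and authentication_type are the required fields.
phoenix_ls = PhoenixLinkedService(
    host="192.168.222.160",                     # placeholder address
    authentication_type="UsernameAndPassword",  # accepts str or PhoenixAuthenticationType
    port=8765,                                  # Phoenix default per the docstring
    username="phoenix_user",                    # placeholder user
    password=SecureString(value="<secret>"),    # any SecretBase subtype; placeholder
    enable_ssl=True,
)

# Dataset: the regeneration drops the required collection property, so only
# type and linked_service_name remain required; table/schema supersede table_name.
phoenix_ds = PhoenixObjectDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",           # reference type, assumed constant
        reference_name="MyPhoenixLinkedService", # placeholder reference name
    ),
    table="events",                              # placeholder table name
    schema_type_properties_schema="default",     # placeholder schema name
)
```

Because these typeProperties are declared as object, each field can also carry an Expression-shaped value (Type: string (or Expression with resultType string)) rather than a literal.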
+class PhoenixSource(TabularSource): + """A copy activity Phoenix server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The MongoDB connection string. Type: string, SecureString - or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param database: Required. The name of the MongoDB database that you want to access. Type: - string (or Expression with resultType string). - :type database: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, - 'database': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - connection_string: object, - database: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MongoDbV2' - self.connection_string = connection_string - self.database = database - - -class Trigger(msrest.serialization.Model): - """Azure Synapse nested object which contains information about creating pipeline run. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MultiplePipelineTrigger, RerunTumblingWindowTrigger. + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PhoenixSource' # type: str + self.query = query - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. +class PipelineFolder(msrest.serialization.Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. 
- :type annotations: list[object] + :param name: The name of the folder that this Pipeline is in. + :type name: str """ - _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - } - - _subtype_map = { - 'type': {'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger'} + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - annotations: Optional[List[object]] = None, + name: Optional[str] = None, **kwargs ): - super(Trigger, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type: str = 'Trigger' - self.description = description - self.runtime_state = None - self.annotations = annotations - + super(PipelineFolder, self).__init__(**kwargs) + self.name = name -class MultiplePipelineTrigger(Trigger): - """Base class for all triggers that support one to many model for trigger to pipeline. - Variables are only populated by the server, and will be ignored when sending a request. +class PipelineListResponse(msrest.serialization.Model): + """A list of pipeline resources. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. - :type type: str - :param description: Trigger description. - :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". - :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. - :type annotations: list[object] - :param pipelines: Pipelines that need to be started. - :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param value: Required. List of pipelines. + :type value: list[~azure.synapse.artifacts.models.PipelineResource] + :param next_link: The link to the next page of results, if any remaining results exist. 
+ :type next_link: str """ _validation = { - 'type': {'required': True}, - 'runtime_state': {'readonly': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'value': {'key': 'value', 'type': '[PipelineResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - annotations: Optional[List[object]] = None, - pipelines: Optional[List["TriggerPipelineReference"]] = None, + value: List["PipelineResource"], + next_link: Optional[str] = None, **kwargs ): - super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'MultiplePipelineTrigger' - self.pipelines = pipelines + super(PipelineListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link -class MySqlLinkedService(LinkedService): - """Linked service for MySQL data source. +class PipelineReference(msrest.serialization.Model): + """Pipeline reference type. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param type: Required. Pipeline reference type. Possible values include: "PipelineReference". + :type type: str or ~azure.synapse.artifacts.models.PipelineReferenceType + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. 
+ :type name: str """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'reference_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + type: Union[str, "PipelineReferenceType"], + reference_name: str, + name: Optional[str] = None, **kwargs ): - super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'MySql' - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + super(PipelineReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + self.name = name -class MySqlTableDataset(Dataset): - """The MySQL table dataset. +class PipelineResource(AzureEntityResource): + """Pipeline resource type. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. + :param description: The description of the pipeline. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. 
- :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param activities: List of activities in pipeline. + :type activities: list[~azure.synapse.artifacts.models.Activity] + :param parameters: List of parameters for pipeline. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param variables: List of variables for pipeline. + :type variables: dict[str, ~azure.synapse.artifacts.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the Pipeline. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The MySQL table name. Type: string (or Expression with resultType string). - :type table_name: object + :param run_dimensions: Dimensions emitted by Pipeline. + :type run_dimensions: dict[str, object] + :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :type folder: ~azure.synapse.artifacts.models.PipelineFolder """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, + activities: Optional[List["Activity"]] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + variables: Optional[Dict[str, "VariableSpecification"]] = None, + concurrency: Optional[int] = None, annotations: Optional[List[object]] = None, - folder: 
Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + run_dimensions: Optional[Dict[str, object]] = None, + folder: Optional["PipelineFolder"] = None, **kwargs ): - super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'MySqlTable' - self.table_name = table_name + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.activities = activities + self.parameters = parameters + self.variables = variables + self.concurrency = concurrency + self.annotations = annotations + self.run_dimensions = run_dimensions + self.folder = folder -class NetezzaLinkedService(LinkedService): - """Netezza linked service. +class PipelineRun(msrest.serialization.Model): + """Information about a pipeline run. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: An ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param pwd: The Azure key vault secret reference of password in connection string. - :type pwd: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline + run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.synapse.artifacts.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. + :vartype last_updated: ~datetime.datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: ~datetime.datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: ~datetime.datetime + :ivar duration_in_ms: The duration of a pipeline run. 
+ :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. + :vartype message: str """ _validation = { - 'type': {'required': True}, + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - pwd: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, **kwargs ): - super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Netezza' - self.connection_string = connection_string - self.pwd = pwd - self.encrypted_credential = encrypted_credential - - -class NetezzaTableDataset(Dataset): - """Netezza dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Netezza. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Netezza. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None + + +class PipelineRunInvokedBy(msrest.serialization.Model): + """Provides entity name and id that started the pipeline run. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. 
+ :vartype invoked_by_type: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, } def __init__( self, - *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'NetezzaTable' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None -class Notebook(msrest.serialization.Model): - """Notebook. +class PipelineRunsQueryResponse(msrest.serialization.Model): + """A list of pipeline runs. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param description: The description of the notebook. - :type description: str - :param big_data_pool: Big data pool reference. - :type big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference - :param session_properties: Session properties. - :type session_properties: ~azure.synapse.artifacts.models.NotebookSessionProperties - :param metadata: Required. Notebook root-level metadata. - :type metadata: ~azure.synapse.artifacts.models.NotebookMetadata - :param nbformat: Required. Notebook format (major number). Incremented between backwards - incompatible changes to the notebook format. - :type nbformat: int - :param nbformat_minor: Required. Notebook format (minor number). Incremented for backward - compatible changes to the notebook format. - :type nbformat_minor: int - :param cells: Required. Array of cells of the current notebook.
- :type cells: list[~azure.synapse.artifacts.models.NotebookCell] + :param value: Required. List of pipeline runs. + :type value: list[~azure.synapse.artifacts.models.PipelineRun] + :param continuation_token: The continuation token for getting the next page of results, if any + remaining results exist, null otherwise. + :type continuation_token: str """ _validation = { - 'metadata': {'required': True}, - 'nbformat': {'required': True}, - 'nbformat_minor': {'required': True}, - 'cells': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'big_data_pool': {'key': 'bigDataPool', 'type': 'BigDataPoolReference'}, - 'session_properties': {'key': 'sessionProperties', 'type': 'NotebookSessionProperties'}, - 'metadata': {'key': 'metadata', 'type': 'NotebookMetadata'}, - 'nbformat': {'key': 'nbformat', 'type': 'int'}, - 'nbformat_minor': {'key': 'nbformat_minor', 'type': 'int'}, - 'cells': {'key': 'cells', 'type': '[NotebookCell]'}, + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } def __init__( self, *, - metadata: "NotebookMetadata", - nbformat: int, - nbformat_minor: int, - cells: List["NotebookCell"], - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - big_data_pool: Optional["BigDataPoolReference"] = None, - session_properties: Optional["NotebookSessionProperties"] = None, + value: List["PipelineRun"], + continuation_token: Optional[str] = None, **kwargs ): - super(Notebook, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.big_data_pool = big_data_pool - self.session_properties = session_properties - self.metadata = metadata - self.nbformat = nbformat - self.nbformat_minor = nbformat_minor - self.cells = cells - + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token -class NotebookCell(msrest.serialization.Model): - """Notebook cell. - All required parameters must be populated in order to send to Azure. +class PolybaseSettings(msrest.serialization.Model): + """PolyBase settings. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param cell_type: Required. String identifying the type of cell. - :type cell_type: str - :param metadata: Required. Cell-level metadata. - :type metadata: object - :param source: Required. Contents of the cell, represented as an array of lines. - :type source: list[str] - :param attachments: Attachments associated with the cell. - :type attachments: object - :param outputs: Cell-level output items. - :type outputs: list[~azure.synapse.artifacts.models.NotebookCellOutputItem] + :param reject_type: Reject type. Possible values include: "value", "percentage". + :type reject_type: str or ~azure.synapse.artifacts.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that can be rejected before + the query fails. Type: number (or Expression with resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to retrieve before + PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with + resultType integer), minimum: 0.
+ :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in delimited text files when + PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType + boolean). + :type use_type_default: object """ - _validation = { - 'cell_type': {'required': True}, - 'metadata': {'required': True}, - 'source': {'required': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'cell_type': {'key': 'cell_type', 'type': 'str'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'source': {'key': 'source', 'type': '[str]'}, - 'attachments': {'key': 'attachments', 'type': 'object'}, - 'outputs': {'key': 'outputs', 'type': '[NotebookCellOutputItem]'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, } def __init__( self, *, - cell_type: str, - metadata: object, - source: List[str], additional_properties: Optional[Dict[str, object]] = None, - attachments: Optional[object] = None, - outputs: Optional[List["NotebookCellOutputItem"]] = None, + reject_type: Optional[Union[str, "PolybaseSettingsRejectType"]] = None, + reject_value: Optional[object] = None, + reject_sample_value: Optional[object] = None, + use_type_default: Optional[object] = None, **kwargs ): - super(NotebookCell, self).__init__(**kwargs) + super(PolybaseSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.cell_type = cell_type - self.metadata = metadata - self.source = source - self.attachments = attachments - self.outputs = outputs + self.reject_type = reject_type + self.reject_value = reject_value + self.reject_sample_value = reject_sample_value + self.use_type_default = use_type_default -class NotebookCellOutputItem(msrest.serialization.Model): - """An item of the notebook cell execution output. +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. - :param name: For output_type=stream, determines the name of stream (stdout / stderr). - :type name: str - :param execution_count: Execution sequence number. - :type execution_count: int - :param output_type: Required. Execution, display, or stream outputs. Possible values include: - "execute_result", "display_data", "stream", "error". - :type output_type: str or ~azure.synapse.artifacts.models.CellOutputType - :param text: For output_type=stream, the stream's text output, represented as a string or an - array of strings. - :type text: object - :param data: Output data. Use MIME type as key, and content as value. - :type data: object - :param metadata: Metadata for the output item. - :type metadata: object + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'output_type': {'required': True}, + 'type': {'required': True}, + 'connection_string': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'execution_count': {'key': 'execution_count', 'type': 'int'}, - 'output_type': {'key': 'output_type', 'type': 'str'}, - 'text': {'key': 'text', 'type': 'object'}, - 'data': {'key': 'data', 'type': 'object'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - output_type: Union[str, "CellOutputType"], - name: Optional[str] = None, - execution_count: Optional[int] = None, - text: Optional[object] = None, - data: Optional[object] = None, - metadata: Optional[object] = None, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(NotebookCellOutputItem, self).__init__(**kwargs) - self.name = name - self.execution_count = execution_count - self.output_type = output_type - self.text = text - self.data = data - self.metadata = metadata + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'PostgreSql' # type: str + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential -class NotebookKernelSpec(msrest.serialization.Model): - """Kernel information. +class PostgreSqlSource(TabularSource): + """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Name of the kernel specification. 
- :type name: str - :param display_name: Required. Name to display in UI. - :type display_name: str + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object """ _validation = { - 'name': {'required': True}, - 'display_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'display_name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - name: str, - display_name: str, additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(NotebookKernelSpec, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.name = name - self.display_name = display_name + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PostgreSqlSource' # type: str + self.query = query -class NotebookLanguageInfo(msrest.serialization.Model): - """Language info. +class PostgreSqlTableDataset(Dataset): + """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. The programming language which this kernel runs. - :type name: str - :param codemirror_mode: The codemirror mode to use for code in this language. - :type codemirror_mode: str + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. 
Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression + with resultType string). + :type schema_type_properties_schema: object """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'codemirror_mode': {'key': 'codemirror_mode', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, *, - name: str, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - codemirror_mode: Optional[str] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(NotebookLanguageInfo, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.name = name - self.codemirror_mode = codemirror_mode + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'PostgreSqlTable' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema -class NotebookListResponse(msrest.serialization.Model): - """A list of Notebook resources. +class PrestoLinkedService(LinkedService): + """Presto server linked service. 
All required parameters must be populated in order to send to Azure. - :param value: Required. List of Notebooks. - :type value: list[~azure.synapse.artifacts.models.NotebookResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The IP address or host name of the Presto server. (i.e. + 192.168.222.160). + :type host: object + :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). + :type server_version: object + :param catalog: Required. The catalog context for all requests against the server. + :type catalog: object + :param port: The TCP port that the Presto server uses to listen for client connections. The + default value is 8080. + :type port: object + :param authentication_type: Required. The authentication mechanism used to connect to the + Presto server. Possible values include: "Anonymous", "LDAP". + :type authentication_type: str or ~azure.synapse.artifacts.models.PrestoAuthenticationType + :param username: The user name used to connect to the Presto server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object + :param time_zone_id: The local time zone used by the connection. Valid values for this option + are specified in the IANA Time Zone Database. The default value is the system time zone. + :type time_zone_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string).
+ :type encrypted_credential: object """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[NotebookResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - value: List["NotebookResource"], - next_link: Optional[str] = None, + host: object, + server_version: object, + catalog: object, + authentication_type: Union[str, "PrestoAuthenticationType"], + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + port: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, + time_zone_id: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(NotebookListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Presto' # type: str + self.host = host + self.server_version = server_version + self.catalog = catalog + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = 
allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.time_zone_id = time_zone_id + self.encrypted_credential = encrypted_credential -class NotebookMetadata(msrest.serialization.Model): - """Notebook root-level metadata. +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param kernelspec: Kernel information. - :type kernelspec: ~azure.synapse.artifacts.models.NotebookKernelSpec - :param language_info: Language info. - :type language_info: ~azure.synapse.artifacts.models.NotebookLanguageInfo + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: This property will be retired. Please consider using schema + table + properties instead. + :type table_name: object + :param table: The table name of the Presto. Type: string (or Expression with resultType + string). + :type table: object + :param schema_type_properties_schema: The schema name of the Presto. Type: string (or + Expression with resultType string). 
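A minimal construction sketch for the PrestoLinkedService model just completed above; SecureString is assumed to be the concrete SecretBase available in azure.synapse.artifacts.models, and every field required by _validation is supplied:

    from azure.synapse.artifacts.models import PrestoLinkedService, SecureString

    linked_service = PrestoLinkedService(
        host="192.168.222.160",       # required
        server_version="0.148-t",     # required
        catalog="hive",               # required
        authentication_type="LDAP",   # required; "Anonymous" or "LDAP"
        username="etl_user",
        password=SecureString(value="<password>"),  # SecretBase subtype (assumed)
        enable_ssl=True,
        port=8443,                    # the server default is 8080 when omitted
    )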
+ :type schema_type_properties_schema: object """ + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'kernelspec': {'key': 'kernelspec', 'type': 'NotebookKernelSpec'}, - 'language_info': {'key': 'language_info', 'type': 'NotebookLanguageInfo'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - kernelspec: Optional["NotebookKernelSpec"] = None, - language_info: Optional["NotebookLanguageInfo"] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(NotebookMetadata, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.kernelspec = kernelspec - self.language_info = language_info - + super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'PrestoObject' # type: str + self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema -class NotebookResource(SubResource): - """Notebook resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class PrestoSource(TabularSource): + """A copy activity Presto server source. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of Notebook. - :type properties: ~azure.synapse.artifacts.models.Notebook + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
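The dataset counterpart follows the same pattern; per the docstring above, table_name is being retired in favor of table plus schema_type_properties_schema. A sketch (LinkedServiceReference keywords assumed):

    from azure.synapse.artifacts.models import LinkedServiceReference, PrestoObjectDataset

    presto_ds = PrestoObjectDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyPrestoLinkedService"),
        table="events",                       # preferred over the retired table_name
        schema_type_properties_schema="web",
    )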
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Notebook'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - properties: "Notebook", + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(NotebookResource, self).__init__(**kwargs) - self.properties = properties + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'PrestoSource' # type: str + self.query = query -class NotebookSessionProperties(msrest.serialization.Model): - """Session properties. +class PrivateEndpoint(msrest.serialization.Model): + """Private endpoint details. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param driver_memory: Required. Amount of memory to use for the driver process. - :type driver_memory: str - :param driver_cores: Required. Number of cores to use for the driver. - :type driver_cores: int - :param executor_memory: Required. Amount of memory to use per executor process. - :type executor_memory: str - :param executor_cores: Required. Number of cores to use for each executor. - :type executor_cores: int - :param num_executors: Required. Number of executors to launch for this session. - :type num_executors: int + :ivar id: Resource id of the private endpoint. 
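PrestoSource only adds query on top of the shared TabularSource fields; the retry and timeout values take the D.HH:MM:SS-style pattern given in the docstring. A sketch:

    from azure.synapse.artifacts.models import PrestoSource

    source = PrestoSource(
        query="SELECT * FROM web.events WHERE dt = '2020-09-01'",
        query_timeout="02:00:00",    # matches ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))
        source_retry_count=3,
        source_retry_wait="00:00:30",
    )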
+ :vartype id: str """ _validation = { - 'driver_memory': {'required': True}, - 'driver_cores': {'required': True}, - 'executor_memory': {'required': True}, - 'executor_cores': {'required': True}, - 'num_executors': {'required': True}, + 'id': {'readonly': True}, } _attribute_map = { - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + 'id': {'key': 'id', 'type': 'str'}, } def __init__( self, - *, - driver_memory: str, - driver_cores: int, - executor_memory: str, - executor_cores: int, - num_executors: int, **kwargs ): - super(NotebookSessionProperties, self).__init__(**kwargs) - self.driver_memory = driver_memory - self.driver_cores = driver_cores - self.executor_memory = executor_memory - self.executor_cores = executor_cores - self.num_executors = num_executors + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None -class ODataLinkedService(LinkedService): - """Open Data Protocol (OData) linked service. +class PrivateEndpointConnection(Resource): + """A private endpoint connection. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with - resultType string). - :type url: object - :param authentication_type: Type of authentication used to connect to the OData service. - Possible values include: "Basic", "Anonymous", "Windows", "AadServicePrincipal", - "ManagedServiceIdentity". - :type authentication_type: str or ~azure.synapse.artifacts.models.ODataAuthenticationType - :param user_name: User name of the OData service. Type: string (or Expression with resultType - string). - :type user_name: object - :param password: Password of the OData service. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param tenant: Specify the tenant information (domain name or tenant ID) under which your - application resides. Type: string (or Expression with resultType string). - :type tenant: object - :param service_principal_id: Specify the application id of your application registered in Azure - Active Directory. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. - Type: string (or Expression with resultType string). - :type aad_resource_id: object - :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used - for service principal. 
Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". - :type aad_service_principal_credential_type: str or - ~azure.synapse.artifacts.models.ODataAadServicePrincipalCredentialType - :param service_principal_key: Specify the secret of your application registered in Azure Active - Directory. Type: string (or Expression with resultType string). - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_embedded_cert: Specify the base64 encoded certificate of your - application registered in Azure Active Directory. Type: string (or Expression with resultType - string). - :type service_principal_embedded_cert: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_embedded_cert_password: Specify the password of your certificate if - your certificate has a password and you are using AadServicePrincipal authentication. Type: - string (or Expression with resultType string). - :type service_principal_embedded_cert_password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param private_endpoint: The private endpoint which the connection belongs to. + :type private_endpoint: ~azure.synapse.artifacts.models.PrivateEndpoint + :param private_link_service_connection_state: Connection state of the private endpoint + connection. + :type private_link_service_connection_state: + ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: Provisioning state of the private endpoint connection. 
+ :vartype provisioning_state: str """ _validation = { - 'type': {'required': True}, - 'url': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, - 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } def __init__( - self, - *, - url: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - authentication_type: Optional[Union[str, "ODataAuthenticationType"]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - service_principal_id: Optional[object] = None, - aad_resource_id: Optional[object] = None, - aad_service_principal_credential_type: Optional[Union[str, "ODataAadServicePrincipalCredentialType"]] = None, - service_principal_key: Optional["SecretBase"] = None, - service_principal_embedded_cert: Optional["SecretBase"] = None, - service_principal_embedded_cert_password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - **kwargs - ): - super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'OData' - self.url = url - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.tenant = tenant - self.service_principal_id = service_principal_id - self.aad_resource_id = aad_resource_id - 
self.aad_service_principal_credential_type = aad_service_principal_credential_type - self.service_principal_key = service_principal_key - self.service_principal_embedded_cert = service_principal_embedded_cert - self.service_principal_embedded_cert_password = service_principal_embedded_cert_password - self.encrypted_credential = encrypted_credential + self, + *, + private_endpoint: Optional["PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = None -class ODataResourceDataset(Dataset): - """The Open Data Protocol (OData) resource dataset. +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """Connection state details of the private endpoint. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. + :param status: The private link service connection status. Possible values include: "Approved", + "Pending", "Rejected", "Disconnected". + :type status: str or ~azure.synapse.artifacts.models.PrivateLinkServiceConnectionStateStatus + :param description: The private link service connection description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: The OData resource path. Type: string (or Expression with resultType string). - :type path: object + :ivar actions_required: The actions required for private link service connection. 
+ :vartype actions_required: str """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'actions_required': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, + status: Optional[Union[str, "PrivateLinkServiceConnectionStateStatus"]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - path: Optional[object] = None, **kwargs ): - super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ODataResource' - self.path = path + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = None -class OdbcLinkedService(LinkedService): - """Open Database Connectivity (ODBC) linked service. +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): + """A list of active debug sessions. + + :param value: Array with all active debug sessions. + :type value: list[~azure.synapse.artifacts.models.DataFlowDebugSessionInfo] + :param next_link: The link to the next page of results, if any remaining results exist. 
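Only status and description are writable on PrivateLinkServiceConnectionState; actions_required is read-only and stays None until the service fills it in. A sketch of approving a connection:

    from azure.synapse.artifacts.models import (
        PrivateEndpointConnection,
        PrivateLinkServiceConnectionState,
    )

    state = PrivateLinkServiceConnectionState(
        status="Approved",   # "Approved", "Pending", "Rejected", "Disconnected"
        description="Approved by the workspace admin.",
    )
    connection = PrivateEndpointConnection(private_link_service_connection_state=state)
    # connection.provisioning_state and state.actions_required are server-populated.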
+ :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["DataFlowDebugSessionInfo"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. All required parameters must be populated in order to send to Azure. @@ -14501,21 +25543,22 @@ class OdbcLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The non-access credential portion of the connection string - as well as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param authentication_type: Type of authentication used to connect to the ODBC data store. - Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). - :type authentication_type: object - :param credential: The access credential portion of the connection string specified in driver- - specific property-value format. - :type credential: ~azure.synapse.artifacts.models.SecretBase - :param user_name: User name for Basic authentication. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com). + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. + :type consumer_secret: ~azure.synapse.artifacts.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 authentication. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. + :type access_token_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
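QueryDataFlowDebugSessionsResponse is a plain page model: value holds one page of sessions and next_link the continuation, so walking all sessions reduces to a loop. A sketch, with fetch_page standing in for whatever operation returns the next page (hypothetical helper, not part of this hunk):

    def all_sessions(fetch_page):
        """Yield DataFlowDebugSessionInfo items across pages (fetch_page is hypothetical)."""
        page = fetch_page(None)  # returns a QueryDataFlowDebugSessionsResponse
        while page is not None:
            for info in page.value or []:
                yield info
            page = fetch_page(page.next_link) if page.next_link else None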
@@ -14524,7 +25567,12 @@ class OdbcLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, } _attribute_map = { @@ -14534,42 +25582,48 @@ class OdbcLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, - 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - connection_string: object, + endpoint: object, + company_id: object, + consumer_key: object, + consumer_secret: "SecretBase", + access_token: "SecretBase", + access_token_secret: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[object] = None, - credential: Optional["SecretBase"] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Odbc' - self.connection_string = connection_string - self.authentication_type = authentication_type - self.credential = credential - self.user_name = user_name - self.password = password + super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'QuickBooks' # type: str + self.endpoint = endpoint + self.company_id = company_id + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self.access_token = access_token + self.access_token_secret = access_token_secret + self.use_encrypted_endpoints = use_encrypted_endpoints self.encrypted_credential = encrypted_credential -class OdbcTableDataset(Dataset): - """The ODBC table dataset. 
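All six QuickBooks connection fields are required per the _validation block above; the three secrets are SecretBase values, for which SecureString is assumed here:

    from azure.synapse.artifacts.models import QuickBooksLinkedService, SecureString

    qb = QuickBooksLinkedService(
        endpoint="quickbooks.api.intuit.com",
        company_id="1234567890",
        consumer_key="<consumer-key>",
        consumer_secret=SecureString(value="<consumer-secret>"),
        access_token=SecureString(value="<access-token>"),
        access_token_secret=SecureString(value="<access-token-secret>"),
        use_encrypted_endpoints=True,   # the documented default is already true
    )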
+class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. All required parameters must be populated in order to send to Azure. @@ -14595,7 +25649,7 @@ class OdbcTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The ODBC table name. Type: string (or Expression with resultType string). + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ @@ -14631,329 +25685,279 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OdbcTable' + super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'QuickBooksObject' # type: str self.table_name = table_name -class Office365Dataset(Dataset): - """The Office365 account. +class QuickBooksSource(TabularSource): + """A copy activity QuickBooks server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or - Expression with resultType string). - :type table_name: object - :param predicate: A predicate expression that can be used to filter the specific rows to - extract from Office 365. Type: string (or Expression with resultType string). - :type predicate: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'QuickBooksSource' # type: str + self.query = query + + +class RecurrenceSchedule(msrest.serialization.Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
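As with PrestoSource, the QuickBooks source is just the shared TabularSource fields plus a query; a one-line sketch:

    from azure.synapse.artifacts.models import QuickBooksSource

    qb_source = QuickBooksSource(query="SELECT * FROM Invoice", query_timeout="01:00:00")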
+ :type monthly_occurrences: list[~azure.synapse.artifacts.models.RecurrenceScheduleOccurrence] """ - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[str]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, } + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + minutes: Optional[List[int]] = None, + hours: Optional[List[int]] = None, + week_days: Optional[List[Union[str, "DayOfWeek"]]] = None, + month_days: Optional[List[int]] = None, + monthly_occurrences: Optional[List["RecurrenceScheduleOccurrence"]] = None, + **kwargs + ): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.minutes = minutes + self.hours = hours + self.week_days = week_days + self.month_days = month_days + self.monthly_occurrences = monthly_occurrences + + +class RecurrenceScheduleOccurrence(msrest.serialization.Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + "Wednesday", "Thursday", "Friday", "Saturday". + :type day: str or ~azure.synapse.artifacts.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + 'day': {'key': 'day', 'type': 'str'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - table_name: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - predicate: Optional[object] = None, + day: Optional[Union[str, "DayOfWeek"]] = None, + occurrence: Optional[int] = None, **kwargs ): - super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Office365Table' - self.table_name = table_name - self.predicate = predicate + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + 
self.additional_properties = additional_properties + self.day = day + self.occurrence = occurrence -class Office365LinkedService(LinkedService): - """Office365 linked service. +class RedirectIncompatibleRowSettings(msrest.serialization.Model): + """Redirect incompatible row settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. - Type: string (or Expression with resultType string). - :type office365_tenant_id: object - :param service_principal_tenant_id: Required. Specify the tenant information under which your - Azure AD web application resides. Type: string (or Expression with resultType string). - :type service_principal_tenant_id: object - :param service_principal_id: Required. Specify the application's client ID. Type: string (or + :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data + Lake Store linked service used for redirecting incompatible row. Must be specified if + redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType + string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. Specify the application's key. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
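Putting the two schedule models together: RecurrenceScheduleOccurrence pairs a weekday with an occurrence index, and RecurrenceSchedule collects minutes, hours, and day selections. A sketch of "8:30 on the last Friday of each month" (the -1 "last" convention is assumed from comparable schedulers; this hunk does not state it):

    from azure.synapse.artifacts.models import RecurrenceSchedule, RecurrenceScheduleOccurrence

    last_friday = RecurrenceScheduleOccurrence(day="Friday", occurrence=-1)  # -1 = last (assumed)
    schedule = RecurrenceSchedule(
        minutes=[30],
        hours=[8],
        monthly_occurrences=[last_friday],
    )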
- :type encrypted_credential: object + :type path: object """ _validation = { - 'type': {'required': True}, - 'office365_tenant_id': {'required': True}, - 'service_principal_tenant_id': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, - 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, } def __init__( self, *, - office365_tenant_id: object, - service_principal_tenant_id: object, - service_principal_id: object, - service_principal_key: "SecretBase", + linked_service_name: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - encrypted_credential: Optional[object] = None, + path: Optional[object] = None, **kwargs ): - super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Office365' - self.office365_tenant_id = office365_tenant_id - self.service_principal_tenant_id = service_principal_tenant_id - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.encrypted_credential = encrypted_credential + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path -class OracleLinkedService(LinkedService): - """Oracle database. +class RedshiftUnloadSettings(msrest.serialization.Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. 
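Note that linked_service_name on RedirectIncompatibleRowSettings is typed object and serialized at the top level ('linkedServiceName'), i.e. a name or expression rather than a LinkedServiceReference. A sketch:

    from azure.synapse.artifacts.models import RedirectIncompatibleRowSettings

    redirect = RedirectIncompatibleRowSettings(
        linked_service_name="MyBlobStorageLinkedService",  # string/expression, not a reference object
        path="copy-errors/incompatible-rows",
    )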
- :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be + used for the unload operation when copying from the Amazon Redshift source. + :type s3_linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store + the unloaded data from Amazon Redshift source. The bucket must be in the same region as the + Amazon Redshift source. Type: string (or Expression with resultType string). + :type bucket_name: object """ _validation = { - 'type': {'required': True}, - 'connection_string': {'required': True}, + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, } def __init__( self, *, - connection_string: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[object] = None, + s3_linked_service_name: "LinkedServiceReference", + bucket_name: object, **kwargs ): - super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Oracle' - self.connection_string = connection_string - self.password = password - self.encrypted_credential = encrypted_credential + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = s3_linked_service_name + self.bucket_name = bucket_name -class OracleServiceCloudLinkedService(LinkedService): - """Oracle Service Cloud linked service. +class RelationalSource(CopySource): + """A copy activity source for various relational databases. 
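RedshiftUnloadSettings, by contrast, does take a full LinkedServiceReference for the interim S3 account, and both of its fields are required. A sketch (reference keywords assumed):

    from azure.synapse.artifacts.models import LinkedServiceReference, RedshiftUnloadSettings

    unload = RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyAmazonS3LinkedService"),
        bucket_name="redshift-unload-staging",  # must be in the same region as the Redshift source
    )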
All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the Oracle Service Cloud instance. - :type host: object - :param username: Required. The user name that you use to access Oracle Service Cloud server. - :type username: object - :param password: Required. The password corresponding to the user name that you provided in the - username key. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'username': {'required': True}, - 'password': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - host: object, - username: object, - password: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'OracleServiceCloud' - self.host = host - self.username = username - self.password = password - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'RelationalSource' # type: str + self.query = query -class OracleServiceCloudObjectDataset(Dataset): - """Oracle Service Cloud dataset. +class RelationalTableDataset(Dataset): + """The relational table dataset. All required parameters must be populated in order to send to Azure. @@ -14979,7 +25983,8 @@ class OracleServiceCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :param table_name: The relational table name. Type: string (or Expression with resultType + string). :type table_name: object """ @@ -15015,200 +26020,302 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OracleServiceCloudObject' + super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'RelationalTable' # type: str self.table_name = table_name -class OracleTableDataset(Dataset): - """The on-premises Oracle database dataset. +class RerunTriggerListResponse(msrest.serialization.Model): + """A list of rerun triggers. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of rerun triggers. + :type value: list[~azure.synapse.artifacts.models.RerunTriggerResource] + :ivar next_link: The continuation token for getting the next page of results, if any remaining + results exist, null otherwise. + :vartype next_link: str + """ + + _validation = { + 'value': {'required': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[RerunTriggerResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["RerunTriggerResource"], + **kwargs + ): + super(RerunTriggerListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = None + + +class RerunTriggerResource(AzureEntityResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. 
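A short sketch of the list-response contract just defined: value is the only caller-supplied field, while next_link is read-only and stays None until the service fills it in during deserialization (import path assumed from the :type references).

from azure.synapse.artifacts.models import RerunTriggerListResponse

# 'value' is required; 'next_link' is declared readonly in _validation, so
# the constructor never sets it and it remains None on a fresh instance.
page = RerunTriggerListResponse(value=[])
assert page.next_link is None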
+ :type properties: ~azure.synapse.artifacts.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__( + self, + *, + properties: "RerunTumblingWindowTrigger", + **kwargs + ): + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = properties + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Trigger description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param schema_type_properties_schema: The schema name of the on-premises Oracle database. Type: - string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of the on-premises Oracle database. Type: string (or Expression - with resultType string). - :type table: object + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period for which restatement + is initiated. Only UTC time is currently supported. + :type requested_start_time: ~datetime.datetime + :param requested_end_time: Required. 
The end time for the time period for which restatement is + initiated. Only UTC time is currently supported. + :type requested_end_time: ~datetime.datetime + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a rerun is triggered. + :type max_concurrency: int """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'runtime_state': {'readonly': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + requested_start_time: datetime.datetime, + requested_end_time: datetime.datetime, + max_concurrency: int, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + parent_trigger: Optional[object] = None, **kwargs ): - super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'OracleTable' - self.table_name = table_name - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'RerunTumblingWindowTrigger' # type: str + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.max_concurrency = max_concurrency -class OrcDataset(Dataset): - """ORC dataset. +class RerunTumblingWindowTriggerActionParameters(msrest.serialization.Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. 
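A construction sketch for the RerunTumblingWindowTrigger defined above; the docstring says only UTC times are supported, and max_concurrency must sit in the 1-50 range declared in _validation. Dates here are illustrative.

import datetime
from azure.synapse.artifacts.models import RerunTumblingWindowTrigger

rerun = RerunTumblingWindowTrigger(
    requested_start_time=datetime.datetime(2020, 9, 1, tzinfo=datetime.timezone.utc),
    requested_end_time=datetime.datetime(2020, 9, 2, tzinfo=datetime.timezone.utc),
    max_concurrency=10,  # validated against the 1-50 bounds above
)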
+ + :param start_time: Required. The start time for the time period for which restatement is + initiated. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: Required. The end time for the time period for which restatement is initiated. + Only UTC time is currently supported. + :type end_time: ~datetime.datetime + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__( + self, + *, + start_time: datetime.datetime, + end_time: datetime.datetime, + max_concurrency: int, + **kwargs + ): + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = start_time + self.end_time = end_time + self.max_concurrency = max_concurrency + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the ORC data storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". - :type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec + :param endpoint: Required. The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys application. Type: + string (or Expression with resultType string). 
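The action-parameters model mirrors the trigger's three rerun fields, but note the difference visible in the two _attribute_map tables: the trigger nests its fields under typeProperties.*, while this payload serializes them flat (startTime, endTime, maxConcurrency). A brief sketch with the same illustrative values:

import datetime
from azure.synapse.artifacts.models import RerunTumblingWindowTriggerActionParameters

# Same rerun window as the trigger sketch above, serialized flat rather
# than under a typeProperties envelope.
params = RerunTumblingWindowTriggerActionParameters(
    start_time=datetime.datetime(2020, 9, 1, tzinfo=datetime.timezone.utc),
    end_time=datetime.datetime(2020, 9, 2, tzinfo=datetime.timezone.utc),
    max_concurrency=10,
)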
+ :type client_id: object + :param client_secret: The client secret associated with the Responsys application. Type: string + (or Expression with resultType string). + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", + endpoint: object, + client_id: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None, - **kwargs - ): - super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, 
schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Orc' - self.location = location - self.orc_compression_codec = orc_compression_codec - - -class ParameterSpecification(msrest.serialization.Model): - """Definition of a single parameter for an entity. - - All required parameters must be populated in order to send to Azure. - - :param type: Required. Parameter type. Possible values include: "Object", "String", "Int", - "Float", "Bool", "Array", "SecureString". - :type type: str or ~azure.synapse.artifacts.models.ParameterType - :param default_value: Default value of parameter. - :type default_value: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'default_value': {'key': 'defaultValue', 'type': 'object'}, - } - - def __init__( - self, - *, - type: Union[str, "ParameterType"], - default_value: Optional[object] = None, + client_secret: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(ParameterSpecification, self).__init__(**kwargs) - self.type = type - self.default_value = default_value + super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Responsys' # type: str + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential -class ParquetDataset(Dataset): - """Parquet dataset. +class ResponsysObjectDataset(Dataset): + """Responsys dataset. All required parameters must be populated in order to send to Azure. @@ -15234,10 +26341,8 @@ class ParquetDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param location: The location of the parquet storage. - :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". - :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec + :param table_name: The table name. Type: string (or Expression with resultType string). 
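A hedged sketch for the ResponsysLinkedService completed above: endpoint and client_id are its only required fields. The endpoint value is purely hypothetical, and SecureString is assumed to be the package's concrete SecretBase implementation; this hunk does not show it.

from azure.synapse.artifacts.models import ResponsysLinkedService, SecureString

ls = ResponsysLinkedService(
    endpoint="https://myorg.responsys.example.net",  # hypothetical endpoint
    client_id="my-responsys-app",                    # hypothetical client ID
    client_secret=SecureString(value="<secret>"),    # SecureString assumed, see lead-in
)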
+ :type table_name: object """ _validation = { @@ -15255,8 +26360,7 @@ class ParquetDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( @@ -15270,108 +26374,73 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - location: Optional["DatasetLocation"] = None, - compression_codec: Optional[Union[str, "ParquetCompressionCodec"]] = None, + table_name: Optional[object] = None, **kwargs ): - super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'Parquet' - self.location = location - self.compression_codec = compression_codec + super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ResponsysObject' # type: str + self.table_name = table_name -class PaypalLinkedService(LinkedService): - """Paypal Service linked service. +class ResponsysSource(TabularSource): + """A copy activity Responsys source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). - :type host: object - :param client_id: Required. The client ID associated with your PayPal application. - :type client_id: object - :param client_secret: The client secret associated with your PayPal application. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. 
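A sketch tying the Responsys dataset above to its linked service; linked_service_name is the dataset's only required field per _validation. LinkedServiceReference's own constructor sits outside this hunk, so the reference_name keyword is an assumption.

from azure.synapse.artifacts.models import LinkedServiceReference, ResponsysObjectDataset

ds = ResponsysObjectDataset(
    # reference_name keyword assumed; see lead-in.
    linked_service_name=LinkedServiceReference(reference_name="MyResponsysService"),
    table_name="Recipients",  # hypothetical Responsys table
)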
- :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - host: object, - client_id: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) - self.type: str = 'Paypal' - self.host = host - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ResponsysSource' # type: str + self.query = query -class PaypalObjectDataset(Dataset): - """Paypal Service dataset. +class RestResourceDataset(Dataset): + """A Rest service dataset. All required parameters must be populated in order to send to Azure. @@ -15397,8 +26466,21 @@ class PaypalObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param relative_url: The relative URL to the resource that the RESTful API provides. Type: + string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page requests. Type: string (or + Expression with resultType string). 
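ResponsysSource derives from TabularSource, which is where the query_timeout knob above comes from; a brief sketch with a hypothetical query, under the same import-path assumption as earlier.

from azure.synapse.artifacts.models import ResponsysSource

src = ResponsysSource(
    query="SELECT * FROM Recipients",  # hypothetical query
    query_timeout="02:00:00",          # TabularSource-level timeout, documented pattern above
)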
+ :type pagination_rules: object """ _validation = { @@ -15416,7 +26498,11 @@ class PaypalObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, } def __init__( @@ -15430,16 +26516,24 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + relative_url: Optional[object] = None, + request_method: Optional[object] = None, + request_body: Optional[object] = None, + additional_headers: Optional[object] = None, + pagination_rules: Optional[object] = None, **kwargs ): - super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PaypalObject' - self.table_name = table_name + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'RestResource' # type: str + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules -class PhoenixLinkedService(LinkedService): - """Phoenix server linked service. +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. All required parameters must be populated in order to send to Azure. @@ -15449,47 +26543,38 @@ class PhoenixLinkedService(LinkedService): :param type: Required. Type of linked service.Constant filled by server. :type type: str :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Phoenix server. (i.e. - 192.168.222.160). - :type host: object - :param port: The TCP port that the Phoenix server uses to listen for client connections. The - default value is 8765. - :type port: object - :param http_path: The partial URL corresponding to the Phoenix server. (i.e. - /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using - WindowsAzureHDInsightService. - :type http_path: object - :param authentication_type: Required. The authentication mechanism used to connect to the - Phoenix server. 
Possible values include: "Anonymous", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.PhoenixAuthenticationType - :param username: The user name used to connect to the Phoenix server. - :type username: object - :param password: The password corresponding to the user name. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server side SSL certificate + when connecting to the endpoint.The default value is true. Type: boolean (or Expression with + resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to connect to the REST + service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", + "ManagedServiceIdentity". + :type authentication_type: str or ~azure.synapse.artifacts.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object + :param service_principal_id: The application's client ID used in AadServicePrincipal + authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in AadServicePrincipal authentication + type. + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal + authentication type under which your application resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to use. + :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -15498,7 +26583,7 @@ class PhoenixLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'host': {'required': True}, + 'url': {'required': True}, 'authentication_type': {'required': True}, } @@ -15509,475 +26594,661 @@ class PhoenixLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - host: object, - authentication_type: Union[str, "PhoenixAuthenticationType"], + url: object, + authentication_type: Union[str, "RestServiceAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - port: Optional[object] = None, - http_path: Optional[object] = None, - username: Optional[object] = None, + enable_server_certificate_validation: Optional[object] = None, + user_name: Optional[object] = None, password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, + service_principal_id: Optional[object] = None, + service_principal_key: Optional["SecretBase"] = None, + tenant: Optional[object] = None, + aad_resource_id: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Phoenix' - self.host = host - self.port = port - self.http_path = http_path + super(RestServiceLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'RestService' # type: str + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation self.authentication_type = authentication_type - self.username = username + self.user_name = user_name self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id self.encrypted_credential = encrypted_credential -class PhoenixObjectDataset(Dataset): - """Phoenix server dataset. +class RestSource(CopySource): + """A copy activity Rest service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Phoenix. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Phoenix. Type: string (or + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: + string (or Expression with resultType string). 
+ :type request_method: object + :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: + string (or Expression with resultType string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object + :type pagination_rules: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to wait before sending the next page request. + :type request_interval: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + request_method: Optional[object] = None, + request_body: Optional[object] = None, + additional_headers: Optional[object] = None, + pagination_rules: Optional[object] = None, + http_request_timeout: Optional[object] = None, + request_interval: Optional[object] = None, **kwargs ): - super(PhoenixObjectDataset,
self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PhoenixObject' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'RestSource' # type: str + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = pagination_rules + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + + +class RetryPolicy(msrest.serialization.Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__( + self, + *, + count: Optional[object] = None, + interval_in_seconds: Optional[int] = None, + **kwargs + ): + super(RetryPolicy, self).__init__(**kwargs) + self.count = count + self.interval_in_seconds = interval_in_seconds + + +class RunFilterParameters(msrest.serialization.Model): + """Query parameters for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param continuation_token: The continuation token for getting the next page of results. Null + for first page. + :type continuation_token: str + :param last_updated_after: Required. The time at or after which the run event was updated in + 'ISO 8601' format. + :type last_updated_after: ~datetime.datetime + :param last_updated_before: Required. The time at or before which the run event was updated in + 'ISO 8601' format. + :type last_updated_before: ~datetime.datetime + :param filters: List of filters. + :type filters: list[~azure.synapse.artifacts.models.RunQueryFilter] + :param order_by: List of OrderBy options.
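A quick sketch of the RetryPolicy bounds above: interval_in_seconds is a plain int validated against 30-86400, while count is typed object so it can carry either a literal integer or an Expression.

from azure.synapse.artifacts.models import RetryPolicy

policy = RetryPolicy(
    count=3,                 # literal int; an Expression object is also accepted
    interval_in_seconds=60,  # must fall within the declared 30-86400 range
)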
+ :type order_by: list[~azure.synapse.artifacts.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__( + self, + *, + last_updated_after: datetime.datetime, + last_updated_before: datetime.datetime, + continuation_token: Optional[str] = None, + filters: Optional[List["RunQueryFilter"]] = None, + order_by: Optional[List["RunQueryOrderBy"]] = None, + **kwargs + ): + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.last_updated_after = last_updated_after + self.last_updated_before = last_updated_before + self.filters = filters + self.order_by = order_by -class PipelineFolder(msrest.serialization.Model): - """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. +class RunQueryFilter(msrest.serialization.Model): + """Query filter option for listing runs. - :param name: The name of the folder that this Pipeline is in. - :type name: str + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The allowed operands to query + pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger + runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", + "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", + "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". + :type operand: str or ~azure.synapse.artifacts.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values include: "Equals", + "NotEquals", "In", "NotIn". + :type operator: str or ~azure.synapse.artifacts.models.RunQueryFilterOperator + :param values: Required. List of filter values. + :type values: list[str] """ + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, } def __init__( self, *, - name: Optional[str] = None, + operand: Union[str, "RunQueryFilterOperand"], + operator: Union[str, "RunQueryFilterOperator"], + values: List[str], **kwargs ): - super(PipelineFolder, self).__init__(**kwargs) - self.name = name + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = operand + self.operator = operator + self.values = values -class PipelineListResponse(msrest.serialization.Model): - """A list of pipeline resources. +class RunQueryOrderBy(msrest.serialization.Model): + """An object to provide order by options for listing runs. All required parameters must be populated in order to send to Azure. - :param value: Required. List of pipelines. 
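RunFilterParameters and RunQueryFilter combine as below; the Union[str, Enum] signatures mean plain strings such as "PipelineName" are accepted wherever the docstrings list possible values. The pipeline name and dates are illustrative.

import datetime
from azure.synapse.artifacts.models import RunFilterParameters, RunQueryFilter

run_filter = RunFilterParameters(
    last_updated_after=datetime.datetime(2020, 9, 1, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2020, 9, 15, tzinfo=datetime.timezone.utc),
    filters=[
        RunQueryFilter(operand="PipelineName", operator="Equals", values=["MyPipeline"]),
    ],
)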
- :type value: list[~azure.synapse.artifacts.models.PipelineResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param order_by: Required. Parameter name to be used for order by. The allowed parameters to + order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are + ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, + TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", + "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", + "TriggerRunTimestamp". + :type order_by: str or ~azure.synapse.artifacts.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". + :type order: str or ~azure.synapse.artifacts.models.RunQueryOrder """ _validation = { - 'value': {'required': True}, + 'order_by': {'required': True}, + 'order': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'order_by': {'key': 'orderBy', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, } def __init__( self, *, - value: List["PipelineResource"], - next_link: Optional[str] = None, + order_by: Union[str, "RunQueryOrderByField"], + order: Union[str, "RunQueryOrder"], **kwargs ): - super(PipelineListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = order_by + self.order = order -class PipelineReference(msrest.serialization.Model): - """Pipeline reference type. - Variables are only populated by the server, and will be ignored when sending a request. +class SalesforceLinkedService(LinkedService): + """Linked service for Salesforce. All required parameters must be populated in order to send to Azure. - :ivar type: Required. Pipeline reference type. Default value: "PipelineReference". - :vartype type: str - :param reference_name: Required. Reference pipeline name. - :type reference_name: str - :param name: Reference name. - :type name: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param environment_url: The URL of the Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from a sandbox, specify + 'https://test.salesforce.com'. To copy data from a custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce instance.
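RunQueryOrderBy rounds out the run-query options; it plugs into the order_by list of the RunFilterParameters shown earlier.

from azure.synapse.artifacts.models import RunQueryOrderBy

# Newest runs first; both fields accept the enum values or their string spellings.
order = RunQueryOrderBy(order_by="RunStart", order="DESC")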
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param security_token: The security token required to remotely access the Salesforce instance. + :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - type = "PipelineReference" - def __init__( self, *, - reference_name: str, - name: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + environment_url: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + security_token: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(PipelineReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.name = name + super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Salesforce' # type: str + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.encrypted_credential = encrypted_credential -class PipelineResource(SubResource): - """Pipeline resource type. +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param description: The description of the pipeline. + :param type: Required. Type of linked service.Constant filled by server.
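A hedged sketch for the SalesforceLinkedService above. Every field is optional; the URL follows the docstring's sandbox example, the credentials are placeholders, and SecureString is again assumed to be the package's concrete SecretBase, as in the Responsys sketch earlier.

from azure.synapse.artifacts.models import SalesforceLinkedService, SecureString

sf = SalesforceLinkedService(
    environment_url="https://test.salesforce.com",       # sandbox URL from the docstring
    username="user@example.com",                          # placeholder credentials
    password=SecureString(value="<password>"),
    security_token=SecureString(value="<security-token>"),
)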
+ :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param activities: List of activities in pipeline. - :type activities: list[~azure.synapse.artifacts.models.Activity] - :param parameters: List of parameters for pipeline. + :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param variables: List of variables for pipeline. - :type variables: dict[str, ~azure.synapse.artifacts.models.VariableSpecification] - :param concurrency: The max number of concurrent runs for the pipeline. - :type concurrency: int - :param annotations: List of tags that can be used for describing the Pipeline. + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param run_dimensions: Dimensions emitted by Pipeline. - :type run_dimensions: dict[str, object] - :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at - the root level. - :type folder: ~azure.synapse.artifacts.models.PipelineFolder + :param client_id: Required. The client ID associated with the Salesforce Marketing Cloud + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce Marketing Cloud + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. Type: boolean (or Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. Type: boolean (or Expression with resultType + boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'concurrency': {'minimum': 1}, + 'type': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, - 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, - 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, - 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, - 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, - 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + client_id: object, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - activities: Optional[List["Activity"]] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - variables: Optional[Dict[str, "VariableSpecification"]] = None, - concurrency: Optional[int] = None, annotations: Optional[List[object]] = None, - run_dimensions: Optional[Dict[str, object]] = None, - folder: Optional["PipelineFolder"] = None, - **kwargs - ): - super(PipelineResource, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.activities = activities - self.parameters = parameters - self.variables = variables - self.concurrency = concurrency - self.annotations = annotations - self.run_dimensions = run_dimensions - self.folder = folder + client_secret: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SalesforceMarketingCloud' # type: str + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + 
self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential -class PipelineRun(msrest.serialization.Model): - """Information about a pipeline run. +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :ivar run_id: Identifier of a run. - :vartype run_id: str - :ivar run_group_id: Identifier that correlates all the recovery runs of a pipeline run. - :vartype run_group_id: str - :ivar is_latest: Indicates if the recovered pipeline run is the latest in its group. - :vartype is_latest: bool - :ivar pipeline_name: The pipeline name. - :vartype pipeline_name: str - :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline - run. - :vartype parameters: dict[str, str] - :ivar invoked_by: Entity that started the pipeline run. - :vartype invoked_by: ~azure.synapse.artifacts.models.PipelineRunInvokedBy - :ivar last_updated: The last updated timestamp for the pipeline run event in ISO8601 format. - :vartype last_updated: ~datetime.datetime - :ivar run_start: The start time of a pipeline run in ISO8601 format. - :vartype run_start: ~datetime.datetime - :ivar run_end: The end time of a pipeline run in ISO8601 format. - :vartype run_end: ~datetime.datetime - :ivar duration_in_ms: The duration of a pipeline run. - :vartype duration_in_ms: int - :ivar status: The status of a pipeline run. - :vartype status: str - :ivar message: The message from a pipeline run. - :vartype message: str + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
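
A corresponding sketch for the Salesforce Marketing Cloud linked service just defined; only client_id is required, and the SSL/endpoint toggles are typed as object because they may also be expressions:

    from azure.synapse.artifacts.models import (
        SalesforceMarketingCloudLinkedService,
        SecureString,  # assumed SecretBase implementation, as above
    )

    smc_ls = SalesforceMarketingCloudLinkedService(
        client_id="<app-client-id>",
        client_secret=SecureString(value="<app-client-secret>"),
        use_encrypted_endpoints=True,  # documented default is true when omitted
    )
    assert smc_ls.type == 'SalesforceMarketingCloud'
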
+ :type table_name: object """ _validation = { - 'run_id': {'readonly': True}, - 'run_group_id': {'readonly': True}, - 'is_latest': {'readonly': True}, - 'pipeline_name': {'readonly': True}, - 'parameters': {'readonly': True}, - 'invoked_by': {'readonly': True}, - 'last_updated': {'readonly': True}, - 'run_start': {'readonly': True}, - 'run_end': {'readonly': True}, - 'duration_in_ms': {'readonly': True}, - 'status': {'readonly': True}, - 'message': {'readonly': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, - 'is_latest': {'key': 'isLatest', 'type': 'bool'}, - 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{str}'}, - 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, - 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, - 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, - 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, - 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, **kwargs ): - super(PipelineRun, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.run_id = None - self.run_group_id = None - self.is_latest = None - self.pipeline_name = None - self.parameters = None - self.invoked_by = None - self.last_updated = None - self.run_start = None - self.run_end = None - self.duration_in_ms = None - self.status = None - self.message = None + super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SalesforceMarketingCloudObject' # type: str + self.table_name = table_name -class PipelineRunInvokedBy(msrest.serialization.Model): - """Provides entity name and id that started the pipeline run. +class SalesforceMarketingCloudSource(TabularSource): + """A copy activity Salesforce Marketing Cloud source. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar name: Name of the entity that started the pipeline run. 
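
The matching dataset hangs off a required LinkedServiceReference; that reference model is assumed to follow the same shape as the PipelineReference type removed above, i.e. a required reference_name:

    from azure.synapse.artifacts.models import (
        LinkedServiceReference,
        SalesforceMarketingCloudObjectDataset,
    )

    smc_ds = SalesforceMarketingCloudObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name="SMCLinkedService"),
        table_name="Campaign",  # hypothetical Marketing Cloud object
    )
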
- :vartype name: str - :ivar id: The ID of the entity that started the run. - :vartype id: str - :ivar invoked_by_type: The type of the entity that started the run. - :vartype invoked_by_type: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'invoked_by_type': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(PipelineRunInvokedBy, self).__init__(**kwargs) - self.name = None - self.id = None - self.invoked_by_type = None + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SalesforceMarketingCloudSource' # type: str + self.query = query -class PipelineRunsQueryResponse(msrest.serialization.Model): - """A list pipeline runs. +class SalesforceObjectDataset(Dataset): + """The Salesforce object dataset. All required parameters must be populated in order to send to Azure. - :param value: Required. List of pipeline runs. - :type value: list[~azure.synapse.artifacts.models.PipelineRun] - :param continuation_token: The continuation token for getting the next page of results, if any - remaining results exist, null otherwise. - :type continuation_token: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param object_api_name: The Salesforce object API name. Type: string (or Expression with + resultType string). + :type object_api_name: object """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[PipelineRun]'}, - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } def __init__( self, *, - value: List["PipelineRun"], - continuation_token: Optional[str] = None, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + object_api_name: Optional[object] = None, **kwargs ): - super(PipelineRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token + super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SalesforceObject' # type: str + self.object_api_name = object_api_name -class PostgreSqlLinkedService(LinkedService): - """Linked service for PostgreSQL data source. +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. All required parameters must be populated in order to send to Azure. 
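
Note how object_api_name maps to 'typeProperties.objectApiName' in _attribute_map: dotted keys are flattened into a nested typeProperties object on the wire. A sketch of the resulting shape:

    from azure.synapse.artifacts.models import LinkedServiceReference, SalesforceObjectDataset

    account_ds = SalesforceObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name="SalesforceLS"),
        object_api_name="Account",
    )
    # Serialized payload (abridged):
    # {
    #   "type": "SalesforceObject",
    #   "linkedServiceName": {"referenceName": "SalesforceLS", ...},
    #   "typeProperties": {"objectApiName": "Account"}
    # }
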
@@ -15994,10 +27265,21 @@ class PostgreSqlLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param connection_string: Required. The connection string. - :type connection_string: object - :param password: The Azure key vault secret reference of password in connection string. - :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param environment_url: The URL of Salesforce Service Cloud instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce instance. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce instance. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param security_token: The security token is required to remotely access Salesforce instance. + :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param extended_properties: Extended properties appended to the connection string. Type: string + (or Expression with resultType string). + :type extended_properties: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -16006,7 +27288,6 @@ class PostgreSqlLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { @@ -16016,33 +27297,42 @@ class PostgreSqlLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - password: Optional["AzureKeyVaultSecretReference"] = None, + environment_url: Optional[object] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + security_token: Optional["SecretBase"] = None, + extended_properties: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(PostgreSqlLinkedService, 
self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'PostgreSql' - self.connection_string = connection_string + super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SalesforceServiceCloud' # type: str + self.environment_url = environment_url + self.username = username self.password = password + self.security_token = security_token + self.extended_properties = extended_properties self.encrypted_credential = encrypted_credential -class PostgreSqlTableDataset(Dataset): - """The PostgreSQL table dataset. +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. All required parameters must be populated in order to send to Azure. @@ -16068,14 +27358,9 @@ class PostgreSqlTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The PostgreSQL table name. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The PostgreSQL schema name. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or + Expression with resultType string). + :type object_api_name: object """ _validation = { @@ -16093,9 +27378,7 @@ class PostgreSqlTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } def __init__( @@ -16109,153 +27392,291 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + object_api_name: Optional[object] = None, **kwargs ): - super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PostgreSqlTable' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SalesforceServiceCloudObject' # type: str + self.object_api_name = 
object_api_name -class PrestoLinkedService(LinkedService): - """Presto server linked service. +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The IP address or host name of the Presto server. (i.e. - 192.168.222.160). - :type host: object - :param server_version: Required. The version of the Presto server. (i.e. 0.148-t). - :type server_version: object - :param catalog: Required. The catalog context for all request against the server. - :type catalog: object - :param port: The TCP port that the Presto server uses to listen for client connections. The - default value is 8080. - :type port: object - :param authentication_type: Required. The authentication mechanism used to connect to the - Presto server. Possible values include: "Anonymous", "LDAP". - :type authentication_type: str or ~azure.synapse.artifacts.models.PrestoAuthenticationType - :param username: The user name used to connect to the Presto server. - :type username: object - :param password: The password corresponding to the user name. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param time_zone_id: The local time zone used by the connection. Valid values for this option - are specified in the IANA Time Zone Database. The default value is the system time zone. - :type time_zone_id: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
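
The Service Cloud variant mirrors SalesforceLinkedService and adds extended_properties for extra connection-string settings; a minimal sketch (SecureString assumed, as above):

    from azure.synapse.artifacts.models import SalesforceServiceCloudLinkedService, SecureString

    sfsc_ls = SalesforceServiceCloudLinkedService(
        environment_url="https://login.salesforce.com",
        username="svc.user@example.com",                     # hypothetical account
        password=SecureString(value="<password>"),
        security_token=SecureString(value="<security-token>"),
    )
    assert sfsc_ls.type == 'SalesforceServiceCloud'
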
- :type encrypted_credential: object + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + include: "Insert", "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
+ :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, + external_id_field_name: Optional[object] = None, + ignore_null_values: Optional[object] = None, + **kwargs + ): + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SalesforceServiceCloudSink' # type: str + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. Possible values + include: "Query", "QueryAll". 
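
A sketch of an upsert-configured Service Cloud sink; write_behavior accepts the documented strings directly, and external_id_field_name names the upsert key:

    from azure.synapse.artifacts.models import SalesforceServiceCloudSink

    sink = SalesforceServiceCloudSink(
        write_behavior="Upsert",
        external_id_field_name="External_Id__c",  # hypothetical external-ID field
        ignore_null_values=True,  # leave destination values untouched for null inputs
    )
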
+ :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, + **kwargs + ): + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SalesforceServiceCloudSource' # type: str + self.query = query + self.read_behavior = read_behavior + + +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is Insert. Possible values + include: "Insert", "Upsert". + :type write_behavior: str or ~azure.synapse.artifacts.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. 
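
The corresponding source; "QueryAll" is the documented alternative to the default "Query" read behavior:

    from azure.synapse.artifacts.models import SalesforceServiceCloudSource

    source = SalesforceServiceCloudSource(
        query="SELECT Id, Subject FROM Case",  # hypothetical SOQL query
        read_behavior="QueryAll",
    )
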
If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, + external_id_field_name: Optional[object] = None, + ignore_null_values: Optional[object] = None, + **kwargs + ): + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SalesforceSink' # type: str + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + + +class SalesforceSource(TabularSource): + """A copy activity Salesforce source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is Query. 
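
SalesforceSink is shape-identical to SalesforceServiceCloudSink above apart from its discriminator; a defaults-only construction is valid since every parameter besides the server-filled type is optional:

    from azure.synapse.artifacts.models import SalesforceSink

    sink = SalesforceSink()  # Insert write behavior by default, per the docstring
    assert sink.type == 'SalesforceSink'
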
Possible values + include: "Query", "QueryAll". + :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'server_version': {'required': True}, - 'catalog': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, - 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__( self, *, - host: object, - server_version: object, - catalog: object, - authentication_type: Union[str, "PrestoAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - time_zone_id: Optional[object] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, **kwargs ): - super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, 
description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Presto' - self.host = host - self.server_version = server_version - self.catalog = catalog - self.port = port - self.authentication_type = authentication_type - self.username = username - self.password = password - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.time_zone_id = time_zone_id - self.encrypted_credential = encrypted_credential + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SalesforceSource' # type: str + self.query = query + self.read_behavior = read_behavior -class PrestoObjectDataset(Dataset): - """Presto server dataset. +class SapBwCubeDataset(Dataset): + """The SAP BW cube dataset. All required parameters must be populated in order to send to Azure. @@ -16281,15 +27702,6 @@ class PrestoObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Presto. Type: string (or Expression with resultType - string). - :type table: object - :param schema_type_properties_schema: The schema name of the Presto. Type: string (or - Expression with resultType string). - :type schema_type_properties_schema: object """ _validation = { @@ -16307,9 +27719,6 @@ class PrestoObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( @@ -16323,46 +27732,14 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, - **kwargs - ): - super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'PrestoObject' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema - - -class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): - """A list of active debug sessions. - - :param value: Array with all active debug sessions. - :type value: list[~azure.synapse.artifacts.models.DataFlowDebugSessionInfo] - :param next_link: The link to the next page of results, if any remaining results exist. 
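
Unlike the Service Cloud source, SalesforceSource derives from TabularSource and therefore also carries query_timeout; a sketch:

    from azure.synapse.artifacts.models import SalesforceSource

    source = SalesforceSource(
        query="SELECT Id, Name FROM Account",  # hypothetical SOQL query
        query_timeout="02:00:00",              # hh:mm:ss pattern from the docstring
        read_behavior="Query",
    )
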
- :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - *, - value: Optional[List["DataFlowDebugSessionInfo"]] = None, - next_link: Optional[str] = None, **kwargs ): - super(QueryDataFlowDebugSessionsResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapBwCube' # type: str -class QuickBooksLinkedService(LinkedService): - """QuickBooks server linked service. +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. All required parameters must be populated in order to send to Azure. @@ -16379,22 +27756,20 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. - quickbooks.api.intuit.com). - :type endpoint: object - :param company_id: Required. The company ID of the QuickBooks company to authorize. - :type company_id: object - :param consumer_key: Required. The consumer key for OAuth 1.0 authentication. - :type consumer_key: object - :param consumer_secret: Required. The consumer secret for OAuth 1.0 authentication. - :type consumer_secret: ~azure.synapse.artifacts.models.SecretBase - :param access_token: Required. The access token for OAuth 1.0 authentication. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param access_token_secret: Required. The access token secret for OAuth 1.0 authentication. - :type access_token_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object + :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with + resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a two-digit decimal + number represented as a string.) Type: string (or Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit + decimal number represented as a string) Type: string (or Expression with resultType string). + :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
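
SapBwCubeDataset adds no type-specific properties beyond the common Dataset fields, so only the linked-service reference is needed (reference construction assumed as above):

    from azure.synapse.artifacts.models import LinkedServiceReference, SapBwCubeDataset

    bw_cube = SapBwCubeDataset(
        linked_service_name=LinkedServiceReference(reference_name="SapBWLS"),
    )
    assert bw_cube.type == 'SapBwCube'
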
@@ -16403,12 +27778,9 @@ class QuickBooksLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'company_id': {'required': True}, - 'consumer_key': {'required': True}, - 'consumer_secret': {'required': True}, - 'access_token': {'required': True}, - 'access_token_secret': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { @@ -16418,158 +27790,171 @@ class QuickBooksLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, - 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, - 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - endpoint: object, - company_id: object, - consumer_key: object, - consumer_secret: "SecretBase", - access_token: "SecretBase", - access_token_secret: "SecretBase", + server: object, + system_number: object, + client_id: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - use_encrypted_endpoints: Optional[object] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'QuickBooks' - self.endpoint = endpoint - self.company_id = company_id - self.consumer_key = consumer_key - self.consumer_secret = consumer_secret - self.access_token = access_token - self.access_token_secret = access_token_secret - self.use_encrypted_endpoints = use_encrypted_endpoints + super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SapBW' # type: str + self.server = server + self.system_number = system_number + self.client_id = client_id + self.user_name = user_name + self.password = password self.encrypted_credential = encrypted_credential -class QuickBooksObjectDataset(Dataset): - """QuickBooks server dataset. +class SapBwSource(TabularSource): + """A copy activity source for SapBW server via MDX. 
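
The three required SAP BW identifiers are typed as object so they can be literals or expressions; as plain strings:

    from azure.synapse.artifacts.models import SapBWLinkedService, SecureString

    sap_bw_ls = SapBWLinkedService(
        server="sapbw.example.com",  # hypothetical host
        system_number="00",          # two-digit decimal, as a string
        client_id="800",             # three-digit decimal, as a string
        user_name="BWUSER",
        password=SecureString(value="<password>"),  # SecureString assumed, as above
    )
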
All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: MDX query. Type: string (or Expression with resultType string). 
+ :type query: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( - self, - *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'QuickBooksObject' - self.table_name = table_name + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapBwSource' # type: str + self.query = query -class RedirectIncompatibleRowSettings(msrest.serialization.Model): - """Redirect incompatible row settings. +class SapCloudForCustomerLinkedService(LinkedService): + """Linked service for SAP Cloud for Customer. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data - Lake Store linked service used for redirecting incompatible row. Must be specified if - redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType - string). - :type linked_service_name: object - :param path: The path for storing the redirect incompatible row data. Type: string (or - Expression with resultType string). - :type path: object + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. 
+ :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param url: Required. The URL of SAP Cloud for Customer OData API. For example, + '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with + resultType string). + :type url: object + :param username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :type username: object + :param password: The password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Either encryptedCredential or + username/password must be provided. Type: string (or Expression with resultType string). + :type encrypted_credential: object """ _validation = { - 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, - 'path': {'key': 'path', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - linked_service_name: object, + url: object, additional_properties: Optional[Dict[str, object]] = None, - path: Optional[object] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.linked_service_name = linked_service_name - self.path = path + super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SapCloudForCustomer' # type: str + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential -class RelationalTableDataset(Dataset): - """The relational table dataset. +class SapCloudForCustomerResourceDataset(Dataset): + """The path of the SAP Cloud for Customer OData entity. All required parameters must be populated in order to send to Azure. 
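A minimal usage sketch for the SapBwSource and SapCloudForCustomerLinkedService models
added in the hunks above (illustrative only, not part of the regenerated output).
SecureString is assumed to be the concrete SecretBase subclass exposed by
azure.synapse.artifacts.models, as in the equivalent Data Factory models; the MDX
query and tenant URL are placeholders.

from azure.synapse.artifacts.models import (
    SapBwSource,
    SapCloudForCustomerLinkedService,
    SecureString,  # assumed SecretBase subclass
)

# MDX source: `query` plus the retry/timeout knobs inherited from TabularSource.
bw_source = SapBwSource(
    query="SELECT ... FROM [$SOME_CUBE]",  # placeholder MDX query
    source_retry_count=3,
    query_timeout="02:00:00",  # matches the documented timespan pattern
)

# Basic-auth linked service; only `url` is required per the docstring above.
c4c_linked_service = SapCloudForCustomerLinkedService(
    url="https://mytenant.crm.ondemand.com/sap/c4c/odata/v1",  # placeholder tenant
    username="user@example.com",  # placeholder
    password=SecureString(value="<secret>"),
)
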
@@ -16595,14 +27980,15 @@ class RelationalTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The relational table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or + Expression with resultType string). + :type path: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'path': {'required': True}, } _attribute_map = { @@ -16615,13 +28001,14 @@ class RelationalTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -16629,261 +28016,335 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, **kwargs ): - super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'RelationalTable' - self.table_name = table_name - + super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapCloudForCustomerResource' # type: str + self.path = path -class RerunTriggerListResponse(msrest.serialization.Model): - """A list of rerun triggers. - Variables are only populated by the server, and will be ignored when sending a request. +class SapCloudForCustomerSink(CopySink): + """A copy activity SAP Cloud for Customer sink. All required parameters must be populated in order to send to Azure. - :param value: Required. List of rerun triggers. - :type value: list[~azure.synapse.artifacts.models.RerunTriggerResource] - :ivar next_link: The continuation token for getting the next page of results, if any remaining - results exist, null otherwise. - :vartype next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). 
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible + values include: "Insert", "Update". + :type write_behavior: str or + ~azure.synapse.artifacts.models.SapCloudForCustomerSinkWriteBehavior """ _validation = { - 'value': {'required': True}, - 'next_link': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[RerunTriggerResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } def __init__( self, *, - value: List["RerunTriggerResource"], + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None, **kwargs ): - super(RerunTriggerListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = None - + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SapCloudForCustomerSink' # type: str + self.write_behavior = write_behavior -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class SapCloudForCustomerSource(TabularSource): + """A copy activity source for SAP Cloud for Customer source. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: ~azure.synapse.artifacts.models.RerunTumblingWindowTrigger + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or + Expression with resultType string). + :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - properties: "RerunTumblingWindowTrigger", + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = properties - + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapCloudForCustomerSource' # type: str + self.query = query -class RerunTumblingWindowTrigger(Trigger): - """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. - Variables are only populated by the server, and will be ignored when sending a request. +class SapEccLinkedService(LinkedService): + """Linked service for SAP ERP Central Component(SAP ECC). All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Trigger type.Constant filled by server. + :param type: Required. Type of linked service.Constant filled by server. :type type: str - :param description: Trigger description. + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are - called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". 
- :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState - :param annotations: List of tags that can be used for describing the trigger. + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param parent_trigger: The parent trigger reference. - :type parent_trigger: object - :param requested_start_time: Required. The start time for the time period for which restatement - is initiated. Only UTC time is currently supported. - :type requested_start_time: ~datetime.datetime - :param requested_end_time: Required. The end time for the time period for which restatement is - initiated. Only UTC time is currently supported. - :type requested_end_time: ~datetime.datetime - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a rerun is triggered. - :type max_concurrency: int + :param url: Required. The URL of SAP ECC OData API. For example, + '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with + resultType string). + :type url: str + :param username: The username for Basic authentication. Type: string (or Expression with + resultType string). + :type username: str + :param password: The password for Basic authentication. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Either encryptedCredential or + username/password must be provided. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: str """ _validation = { 'type': {'required': True}, - 'runtime_state': {'readonly': True}, - 'requested_start_time': {'required': True}, - 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, - 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, - 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'url': {'key': 'typeProperties.url', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'str'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__( self, *, - requested_start_time: datetime.datetime, - requested_end_time: datetime.datetime, - max_concurrency: int, + url: str, additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - parent_trigger: Optional[object] = None, + username: Optional[str] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[str] = None, **kwargs ): - super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) - self.type: str = 'RerunTumblingWindowTrigger' - self.parent_trigger = parent_trigger - self.requested_start_time = requested_start_time - self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency + super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SapEcc' # type: str + self.url = url + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential -class RerunTumblingWindowTriggerActionParameters(msrest.serialization.Model): - """Rerun tumbling window trigger Parameters. +class SapEccResourceDataset(Dataset): + """The path of the SAP ECC OData entity. All required parameters must be populated in order to send to Azure. - :param start_time: Required. The start time for the time period for which restatement is - initiated. Only UTC time is currently supported. - :type start_time: ~datetime.datetime - :param end_time: Required. The end time for the time period for which restatement is initiated. - Only UTC time is currently supported. - :type end_time: ~datetime.datetime - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a rerun is triggered. 
- :type max_concurrency: int + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with + resultType string). + :type path: object """ _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'path': {'required': True}, } _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__( self, *, - start_time: datetime.datetime, - end_time: datetime.datetime, - max_concurrency: int, + linked_service_name: "LinkedServiceReference", + path: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, **kwargs ): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency + super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapEccResource' # type: str + self.path = path -class Resource(msrest.serialization.Model): - 
"""Azure Synapse top-level resource. +class SapEccSource(TabularSource): + """A copy activity source for SAP ECC source. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with + resultType string). + :type query: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = location - self.tags = tags - self.e_tag = None + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapEccSource' # type: str + self.query = query -class ResponsysLinkedService(LinkedService): - """Responsys 
linked service. +class SapHanaLinkedService(LinkedService): + """SAP HANA Linked Service. All required parameters must be populated in order to send to Azure. @@ -16900,25 +28361,20 @@ class ResponsysLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of the Responsys server. - :type endpoint: object - :param client_id: Required. The client ID associated with the Responsys application. Type: - string (or Expression with resultType string). - :type client_id: object - :param client_secret: The client secret associated with the Responsys application. Type: string - (or Expression with resultType string). - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object + :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Host name of the SAP HANA server. Type: string (or Expression with + resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect to the SAP HANA + server. Possible values include: "Basic", "Windows". + :type authentication_type: str or ~azure.synapse.artifacts.models.SapHanaAuthenticationType + :param user_name: Username to access the SAP HANA server. Type: string (or Expression with + resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. + :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
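A companion sketch for the SAP ECC models above (again illustrative, not generator
output). The LinkedServiceReference construction is an assumption based on how these
generated models usually spell the reference type; the linked service name and entity
path are placeholders.

from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    SapEccResourceDataset,
    SapEccSource,
)

# Dataset: linked_service_name and path are both required by _validation above.
ecc_dataset = SapEccResourceDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",  # assumed reference-type literal
        reference_name="MySapEccLinkedService",  # placeholder
    ),
    path="ProductCollection",  # placeholder OData entity path
)

# Source: reusing the docstring's own "$top=1" example as the OData query.
ecc_source = SapEccSource(query="$top=1", query_timeout="02:00:00")
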
@@ -16927,8 +28383,7 @@ class ResponsysLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'client_id': {'required': True}, + 'server': {'required': True}, } _attribute_map = { @@ -16938,113 +28393,138 @@ class ResponsysLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - endpoint: object, - client_id: object, + server: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + connection_string: Optional[object] = None, + authentication_type: Optional[Union[str, "SapHanaAuthenticationType"]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Responsys' - self.endpoint = endpoint - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification + super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SapHana' # type: str + self.connection_string = connection_string + self.server = server + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password self.encrypted_credential = encrypted_credential -class ResponsysObjectDataset(Dataset): - """Responsys dataset. +class SapHanaPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP HANA source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. 
Type: string (or Expression with resultType string). + :type partition_column_name: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + **kwargs + ): + super(SapHanaPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + + +class SapHanaSource(TabularSource): + """A copy activity source for SAP HANA source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). + :type query: object + :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression + with resultType integer). + :type packet_size: object + :param partition_option: The partition mechanism that will be used for SAP HANA read in + parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.SapHanaPartitionOption + :param partition_settings: The settings that will be leveraged for SAP HANA source + partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SapHanaPartitionSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'packet_size': {'key': 'packetSize', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + packet_size: Optional[object] = None, + partition_option: Optional[Union[str, "SapHanaPartitionOption"]] = None, + partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): - super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ResponsysObject' - self.table_name = table_name + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapHanaSource' # type: str + self.query = query + self.packet_size = packet_size + self.partition_option = partition_option + self.partition_settings = partition_settings -class RestResourceDataset(Dataset): - """A Rest service dataset. +class SapHanaTableDataset(Dataset): + """SAP HANA Table properties. All required parameters must be populated in order to send to Azure. @@ -17070,21 +28550,11 @@ class RestResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param relative_url: The relative URL to the resource that the RESTful API provides. 
Type: - string (or Expression with resultType string). - :type relative_url: object - :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: - string (or Expression with resultType string). - :type request_method: object - :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: - string (or Expression with resultType string). - :type request_body: object - :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: - string (or Expression with resultType string). - :type additional_headers: object - :param pagination_rules: The pagination rules to compose next page requests. Type: string (or - Expression with resultType string). - :type pagination_rules: object + :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression + with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). + :type table: object """ _validation = { @@ -17102,11 +28572,8 @@ class RestResourceDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, - 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, - 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, - 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, - 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( @@ -17120,24 +28587,18 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - relative_url: Optional[object] = None, - request_method: Optional[object] = None, - request_body: Optional[object] = None, - additional_headers: Optional[object] = None, - pagination_rules: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'RestResource' - self.relative_url = relative_url - self.request_method = request_method - self.request_body = request_body - self.additional_headers = additional_headers - self.pagination_rules = pagination_rules + super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapHanaTable' # type: str + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class RestServiceLinkedService(LinkedService): - """Rest Service linked service. +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. 
All required parameters must be populated in order to send to Azure. @@ -17154,31 +28615,26 @@ class RestServiceLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. The base URL of the REST service. - :type url: object - :param enable_server_certificate_validation: Whether to validate server side SSL certificate - when connecting to the endpoint.The default value is true. Type: boolean (or Expression with - resultType boolean). - :type enable_server_certificate_validation: object - :param authentication_type: Required. Type of authentication used to connect to the REST - service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", - "ManagedServiceIdentity". - :type authentication_type: str or ~azure.synapse.artifacts.models.RestServiceAuthenticationType - :param user_name: The user name used in Basic authentication type. + :param server: Required. Host name of the SAP BW instance where the open hub destination is + located. Type: string (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system where the open hub destination + is located. (Usually a two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where the open hub + destination is located. (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). + :type client_id: object + :param language: Language of the BW system where the open hub destination is located. The + default value is EN. Type: string (or Expression with resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub destination is + located. Type: string (or Expression with resultType string). :type user_name: object - :param password: The password used in Basic authentication type. + :param password: Password to access the SAP BW server where the open hub destination is + located. :type password: ~azure.synapse.artifacts.models.SecretBase - :param service_principal_id: The application's client ID used in AadServicePrincipal - authentication type. - :type service_principal_id: object - :param service_principal_key: The application's key used in AadServicePrincipal authentication - type. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal - authentication type under which your application resides. - :type tenant: object - :param aad_resource_id: The resource you are requesting authorization to use. - :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
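One more sketch for the SapOpenHubLinkedService documented above (illustrative only).
Host, system number, client and user values are placeholders; SecureString is assumed
as before.

from azure.synapse.artifacts.models import SapOpenHubLinkedService, SecureString

open_hub_ls = SapOpenHubLinkedService(
    server="sapbw.example.com",  # placeholder host
    system_number="01",  # usually a two-digit decimal number, as a string
    client_id="800",  # usually a three-digit decimal number, as a string
    language="EN",  # the documented default
    user_name="bw_user",  # placeholder
    password=SecureString(value="<secret>"),
)
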
@@ -17187,277 +28643,204 @@ class RestServiceLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'url': {'required': True}, - 'authentication_type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, - 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, - 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - *, - url: object, - authentication_type: Union[str, "RestServiceAuthenticationType"], - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - enable_server_certificate_validation: Optional[object] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - service_principal_id: Optional[object] = None, - service_principal_key: Optional["SecretBase"] = None, - tenant: Optional[object] = None, - aad_resource_id: Optional[object] = None, - encrypted_credential: Optional[object] = None, - **kwargs - ): - super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'RestService' - self.url = url - self.enable_server_certificate_validation = enable_server_certificate_validation - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.tenant = tenant - self.aad_resource_id = aad_resource_id - self.encrypted_credential = encrypted_credential - - -class RunFilterParameters(msrest.serialization.Model): - """Query parameters for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param continuation_token: The continuation token for getting the next page of results. Null - for first page. - :type continuation_token: str - :param last_updated_after: Required. The time at or after which the run event was updated in - 'ISO 8601' format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: Required. The time at or before which the run event was updated in - 'ISO 8601' format. - :type last_updated_before: ~datetime.datetime - :param filters: List of filters. 
- :type filters: list[~azure.synapse.artifacts.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~azure.synapse.artifacts.models.RunQueryOrderBy] - """ - - _validation = { - 'last_updated_after': {'required': True}, - 'last_updated_before': {'required': True}, - } - - _attribute_map = { - 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, - 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, - 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, - 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, - 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, - } - - def __init__( - self, - *, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token: Optional[str] = None, - filters: Optional[List["RunQueryFilter"]] = None, - order_by: Optional[List["RunQueryOrderBy"]] = None, - **kwargs - ): - super(RunFilterParameters, self).__init__(**kwargs) - self.continuation_token = continuation_token - self.last_updated_after = last_updated_after - self.last_updated_before = last_updated_before - self.filters = filters - self.order_by = order_by - - -class RunQueryFilter(msrest.serialization.Model): - """Query filter option for listing runs. - - All required parameters must be populated in order to send to Azure. - - :param operand: Required. Parameter name to be used for filter. The allowed operands to query - pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger - runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: "PipelineName", - "Status", "RunStart", "RunEnd", "ActivityName", "ActivityRunStart", "ActivityRunEnd", - "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", "LatestOnly". - :type operand: str or ~azure.synapse.artifacts.models.RunQueryFilterOperand - :param operator: Required. Operator to be used for filter. Possible values include: "Equals", - "NotEquals", "In", "NotIn". - :type operator: str or ~azure.synapse.artifacts.models.RunQueryFilterOperator - :param values: Required. List of filter values. 
- :type values: list[str] - """ - - _validation = { - 'operand': {'required': True}, - 'operator': {'required': True}, - 'values': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, } _attribute_map = { - 'operand': {'key': 'operand', 'type': 'str'}, - 'operator': {'key': 'operator', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - operand: Union[str, "RunQueryFilterOperand"], - operator: Union[str, "RunQueryFilterOperator"], - values: List[str], + server: object, + system_number: object, + client_id: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + language: Optional[object] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - super(RunQueryFilter, self).__init__(**kwargs) - self.operand = operand - self.operator = operator - self.values = values + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SapOpenHub' # type: str + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential -class RunQueryOrderBy(msrest.serialization.Model): - """An object to provide order by options for listing runs. +class SapOpenHubSource(TabularSource): + """A copy activity source for SAP Business Warehouse Open Hub Destination source. All required parameters must be populated in order to send to Azure. - :param order_by: Required. Parameter name to be used for order by. The allowed parameters to - order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are - ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, - TriggerRunTimestamp and Status. Possible values include: "RunStart", "RunEnd", "PipelineName", - "Status", "ActivityName", "ActivityRunStart", "ActivityRunEnd", "TriggerName", - "TriggerRunTimestamp". - :type order_by: str or ~azure.synapse.artifacts.models.RunQueryOrderByField - :param order: Required. Sorting order of the parameter. Possible values include: "ASC", "DESC". 
- :type order: str or ~azure.synapse.artifacts.models.RunQueryOrder + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). + :type base_request_id: object """ _validation = { - 'order_by': {'required': True}, - 'order': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'order_by': {'key': 'orderBy', 'type': 'str'}, - 'order': {'key': 'order', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } def __init__( self, *, - order_by: Union[str, "RunQueryOrderByField"], - order: Union[str, "RunQueryOrder"], + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + exclude_last_request: Optional[object] = None, + base_request_id: Optional[object] = None, **kwargs ): - super(RunQueryOrderBy, self).__init__(**kwargs) - self.order_by = order_by - self.order = order + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapOpenHubSource' # type: str + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id -class SalesforceLinkedService(LinkedService): - """Linked service for Salesforce. +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. All required parameters must be populated in order to send to Azure. 
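A short sketch of a delta-load configuration for the SapOpenHubSource copy source defined above; every argument is optional, and the timeout value below is illustrative:

from azure.synapse.artifacts.models import SapOpenHubSource

# Skip the last (possibly still open) request and only read requests newer
# than request ID 0; query_timeout follows the d.hh:mm:ss pattern documented above.
source = SapOpenHubSource(
    exclude_last_request=True,
    base_request_id=0,
    query_timeout="02:00:00",
)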
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param environment_url: The URL of Salesforce instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. - :type security_token: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param open_hub_destination_name: Required. The name of the Open Hub Destination with + destination type as Database Table. Type: string (or Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last request. The default + value is true. Type: boolean (or Expression with resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is set, only data with + requestId larger than the value of this property will be retrieved. The default value is 0. + Type: integer (or Expression with resultType integer ). 
+ :type base_request_id: object """ _validation = { 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'open_hub_destination_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, } def __init__( self, *, + linked_service_name: "LinkedServiceReference", + open_hub_destination_name: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - environment_url: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - security_token: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + exclude_last_request: Optional[object] = None, + base_request_id: Optional[object] = None, **kwargs ): - super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Salesforce' - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.encrypted_credential = encrypted_credential + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapOpenHubTable' # type: str + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id -class SalesforceMarketingCloudLinkedService(LinkedService): - """Salesforce Marketing Cloud linked service. +class SapTableLinkedService(LinkedService): + """SAP Table Linked Service. All required parameters must be populated in order to send to Azure. 
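A sketch for the SapOpenHubTableDataset defined above. LinkedServiceReference's constructor is not part of this diff, so the type/reference_name keywords used here are assumptions; the destination name is a placeholder:

from azure.synapse.artifacts.models import LinkedServiceReference, SapOpenHubTableDataset

# Both linked_service_name and open_hub_destination_name are required.
ls_ref = LinkedServiceReference(
    type="LinkedServiceReference",        # assumed fixed discriminator
    reference_name="MySapOpenHubService", # hypothetical linked service name
)
dataset = SapOpenHubTableDataset(
    linked_service_name=ls_ref,
    open_hub_destination_name="MyOpenHubDestination",
)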
@@ -17474,23 +28857,52 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param client_id: Required. The client ID associated with the Salesforce Marketing Cloud - application. Type: string (or Expression with resultType string). + :param server: Host name of the SAP instance where the table is located. Type: string (or + Expression with resultType string). + :type server: object + :param system_number: System number of the SAP system where the table is located. (Usually a + two-digit decimal number represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Client ID of the client on the SAP system where the table is located. + (Usually a three-digit decimal number represented as a string) Type: string (or Expression with + resultType string). :type client_id: object - :param client_secret: The client secret associated with the Salesforce Marketing Cloud - application. Type: string (or Expression with resultType string). - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. Type: boolean (or Expression with resultType boolean). - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. Type: boolean (or Expression with resultType - boolean). - :type use_peer_verification: object + :param language: Language of the SAP system where the table is located. The default value is + EN. Type: string (or Expression with resultType string). + :type language: object + :param system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :type system_id: object + :param user_name: Username to access the SAP server where the table is located. Type: string + (or Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP server where the table is located. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :type message_server_service: object + :param snc_mode: SNC activation indicator to access the SAP server where the table is located. + Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). + :type snc_mode: object + :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. + Type: string (or Expression with resultType string). + :type snc_my_name: object + :param snc_partner_name: Communication partner's SNC name to access the SAP server where the + table is located. 
Type: string (or Expression with resultType string). + :type snc_partner_name: object + :param snc_library_path: External security product's library to access the SAP server where the + table is located. Type: string (or Expression with resultType string). + :type snc_library_path: object + :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string + (or Expression with resultType string). + :type snc_qop: object + :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :type logon_group: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -17499,7 +28911,6 @@ class SalesforceMarketingCloudLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { @@ -17509,42 +28920,114 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, + 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, + 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, + 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, + 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - client_id: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + server: Optional[object] = None, + system_number: Optional[object] = None, + client_id: Optional[object] = None, + language: Optional[object] = None, + system_id: Optional[object] = None, + user_name: Optional[object] = None, + 
password: Optional["SecretBase"] = None, + message_server: Optional[object] = None, + message_server_service: Optional[object] = None, + snc_mode: Optional[object] = None, + snc_my_name: Optional[object] = None, + snc_partner_name: Optional[object] = None, + snc_library_path: Optional[object] = None, + snc_qop: Optional[object] = None, + logon_group: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SalesforceMarketingCloud' + super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SapTable' # type: str + self.server = server + self.system_number = system_number self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification + self.language = language + self.system_id = system_id + self.user_name = user_name + self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.snc_mode = snc_mode + self.snc_my_name = snc_my_name + self.snc_partner_name = snc_partner_name + self.snc_library_path = snc_library_path + self.snc_qop = snc_qop + self.logon_group = logon_group self.encrypted_credential = encrypted_credential -class SalesforceMarketingCloudObjectDataset(Dataset): - """Salesforce Marketing Cloud dataset. +class SapTablePartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for SAP table source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range + partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_lower_bound: object + :param max_partitions_number: The maximum value of partitions the table will be split into. + Type: integer (or Expression with resultType string). 
+ :type max_partitions_number: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + 'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + max_partitions_number: Optional[object] = None, + **kwargs + ): + super(SapTablePartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + self.max_partitions_number = max_partitions_number + + +class SapTableResourceDataset(Dataset): + """SAP Table Resource properties. All required parameters must be populated in order to send to Azure. @@ -17570,13 +29053,15 @@ class SalesforceMarketingCloudObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: Required. The name of the SAP Table. Type: string (or Expression with + resultType string). :type table_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'table_name': {'required': True}, } _attribute_map = { @@ -17596,6 +29081,7 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", + table_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -17603,387 +29089,382 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, **kwargs ): - super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceMarketingCloudObject' + super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SapTableResource' # type: str self.table_name = table_name -class SalesforceObjectDataset(Dataset): - """The Salesforce object dataset. +class SapTableSource(TabularSource): + """A copy activity source for SAP Table source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. 
Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param object_api_name: The Salesforce object API name. Type: string (or Expression with - resultType string). - :type object_api_name: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param row_count: The number of rows to be retrieved. Type: integer(or Expression with + resultType integer). + :type row_count: object + :param row_skips: The number of rows that will be skipped. Type: integer (or Expression with + resultType integer). + :type row_skips: object + :param rfc_table_fields: The fields of the SAP table that will be retrieved. For example, + column0, column1. Type: string (or Expression with resultType string). + :type rfc_table_fields: object + :param rfc_table_options: The options for the filtering of the SAP Table. For example, COLUMN0 + EQ SOME VALUE. Type: string (or Expression with resultType string). + :type rfc_table_options: object + :param batch_size: Specifies the maximum number of rows that will be retrieved at a time when + retrieving data from SAP Table. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param partition_option: The partition mechanism that will be used for SAP table read in + parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", + "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". + :type partition_option: str or ~azure.synapse.artifacts.models.SapTablePartitionOption + :param partition_settings: The settings that will be leveraged for SAP table source + partitioning. 
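Unlike SapBWLinkedService, the SapTableLinkedService added above has no required type properties, while SapTableResourceDataset now makes table_name mandatory. A combined sketch, with placeholder host and table names and the same assumed LinkedServiceReference signature as in the earlier sketch:

from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    SapTableLinkedService,
    SapTableResourceDataset,
    SecureString,
)

sap_table_ls = SapTableLinkedService(
    server="sap-erp.example.com",  # hypothetical application server
    system_number="00",
    client_id="100",
    user_name="rfc_user",
    password=SecureString(value="example-password"),
)
resource = SapTableResourceDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MySapTableService"),
    table_name="MARA",             # illustrative SAP table name
)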
+ :type partition_settings: ~azure.synapse.artifacts.models.SapTablePartitionSettings """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'row_count': {'key': 'rowCount', 'type': 'object'}, + 'row_skips': {'key': 'rowSkips', 'type': 'object'}, + 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, + 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - object_api_name: Optional[object] = None, - **kwargs - ): - super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceObject' - self.object_api_name = object_api_name - + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + row_count: Optional[object] = None, + row_skips: Optional[object] = None, + rfc_table_fields: Optional[object] = None, + rfc_table_options: Optional[object] = None, + batch_size: Optional[object] = None, + custom_rfc_read_table_function_module: Optional[object] = None, + partition_option: Optional[Union[str, "SapTablePartitionOption"]] = None, + partition_settings: Optional["SapTablePartitionSettings"] = None, + **kwargs + ): + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SapTableSource' # type: str + self.row_count = row_count + self.row_skips = row_skips + self.rfc_table_fields = rfc_table_fields + self.rfc_table_options = rfc_table_options + 
self.batch_size = batch_size + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.partition_option = partition_option + self.partition_settings = partition_settings + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. -class SalesforceServiceCloudLinkedService(LinkedService): - """Linked service for Salesforce Service Cloud. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Trigger description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param environment_url: The URL of Salesforce Service Cloud instance. Default is - 'https://login.salesforce.com'. To copy data from sandbox, specify - 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, - 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). - :type environment_url: object - :param username: The username for Basic authentication of the Salesforce instance. Type: string - (or Expression with resultType string). - :type username: object - :param password: The password for Basic authentication of the Salesforce instance. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. - :type security_token: ~azure.synapse.artifacts.models.SecretBase - :param extended_properties: Extended properties appended to the connection string. Type: string - (or Expression with resultType string). - :type extended_properties: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. 
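The partitioned-read options of the SapTableSource above combine with SapTablePartitionSettings as follows; the column name, bounds, and filters are placeholders, and "PartitionOnInt" is one of the SapTablePartitionOption values listed in the docstring:

from azure.synapse.artifacts.models import SapTablePartitionSettings, SapTableSource

source = SapTableSource(
    rfc_table_options="MTART EQ 'FERT'",   # illustrative row filter
    rfc_table_fields="MATNR, MTART",       # illustrative column projection
    partition_option="PartitionOnInt",
    partition_settings=SapTablePartitionSettings(
        partition_column_name="MATNR",     # hypothetical numeric column
        partition_lower_bound="000000000000000001",
        partition_upper_bound="000000000099999999",
        max_partitions_number=16,
    ),
)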
+ :type recurrence: ~azure.synapse.artifacts.models.ScheduleTriggerRecurrence """ _validation = { 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'recurrence': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, - 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, } def __init__( self, *, + recurrence: "ScheduleTriggerRecurrence", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - environment_url: Optional[object] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - security_token: Optional["SecretBase"] = None, - extended_properties: Optional[object] = None, - encrypted_credential: Optional[object] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + **kwargs + ): + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'ScheduleTrigger' # type: str + self.recurrence = recurrence + + +class ScheduleTriggerRecurrence(msrest.serialization.Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :type frequency: str or ~azure.synapse.artifacts.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: ~datetime.datetime + :param end_time: The end time. + :type end_time: ~datetime.datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. 
+ :type schedule: ~azure.synapse.artifacts.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + frequency: Optional[Union[str, "RecurrenceFrequency"]] = None, + interval: Optional[int] = None, + start_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + time_zone: Optional[str] = None, + schedule: Optional["RecurrenceSchedule"] = None, + **kwargs + ): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.time_zone = time_zone + self.schedule = schedule + + +class ScriptAction(msrest.serialization.Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should be executed. Possible + values include: "Headnode", "Workernode", "Zookeeper". + :type roles: str or ~azure.synapse.artifacts.models.HdiNodeTypes + :param parameters: The parameters for the script action. + :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + uri: str, + roles: Union[str, "HdiNodeTypes"], + parameters: Optional[str] = None, **kwargs ): - super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SalesforceServiceCloud' - self.environment_url = environment_url - self.username = username - self.password = password - self.security_token = security_token - self.extended_properties = extended_properties - self.encrypted_credential = encrypted_credential + super(ScriptAction, self).__init__(**kwargs) + self.name = name + self.uri = uri + self.roles = roles + self.parameters = parameters -class SalesforceServiceCloudObjectDataset(Dataset): - """The Salesforce Service Cloud object dataset. +class SecureString(SecretBase): + """Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Type of the secret.Constant filled by server. 
:type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or - Expression with resultType string). - :type object_api_name: object + :param value: Required. Value of secure string. + :type value: str """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'str'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - object_api_name: Optional[object] = None, + value: str, **kwargs ): - super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SalesforceServiceCloudObject' - self.object_api_name = object_api_name + super(SecureString, self).__init__(**kwargs) + self.type = 'SecureString' # type: str + self.value = value -class SapBwCubeDataset(Dataset): - """The SAP BW cube dataset. +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. 
The type of dependency reference.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. + :type size: str """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, + offset: str, + size: Optional[str] = None, **kwargs ): - super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapBwCube' + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.type = 'SelfDependencyTumblingWindowTriggerReference' # type: str + self.offset = offset + self.size = size -class SapBWLinkedService(LinkedService): - """SAP Business Warehouse Linked Service. +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. 
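The ScheduleTrigger/ScheduleTriggerRecurrence pair added above wires together like this; the cadence below is an arbitrary example, and runtime_state is read-only so it cannot be set on construction:

import datetime

from azure.synapse.artifacts.models import ScheduleTrigger, ScheduleTriggerRecurrence

# Fire every 8 hours from a fixed UTC start instant.
recurrence = ScheduleTriggerRecurrence(
    frequency="Hour",   # a RecurrenceFrequency value
    interval=8,
    start_time=datetime.datetime(2020, 9, 15),
    time_zone="UTC",
)
trigger = ScheduleTrigger(recurrence=recurrence)  # recurrence is required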
All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.synapse.artifacts.models.IntegrationRuntimeType + :param description: Integration runtime description. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with - resultType string). - :type server: object - :param system_number: Required. System number of the BW system. (Usually a two-digit decimal - number represented as a string.) Type: string (or Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit - decimal number represented as a string) Type: string (or Expression with resultType string). - :type client_id: object - :param user_name: Username to access the SAP BW server. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password to access the SAP BW server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param linked_info: The base definition of a linked integration runtime. 
+ :type linked_info: ~azure.synapse.artifacts.models.LinkedIntegrationRuntimeType """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, } def __init__( self, *, - server: object, - system_number: object, - client_id: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + linked_info: Optional["LinkedIntegrationRuntimeType"] = None, **kwargs ): - super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapBW' - self.server = server - self.system_number = system_number - self.client_id = client_id - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.type = 'SelfHosted' # type: str + self.linked_info = linked_info -class SapCloudForCustomerLinkedService(LinkedService): - """Linked service for SAP Cloud for Customer. +class ServiceNowLinkedService(LinkedService): + """ServiceNow server linked service. All required parameters must be populated in order to send to Azure. @@ -18000,24 +29481,42 @@ class SapCloudForCustomerLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param url: Required. The URL of SAP Cloud for Customer OData API. For example, - '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with - resultType string). - :type url: object - :param username: The username for Basic authentication. Type: string (or Expression with - resultType string). + :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. + :code:``.service-now.com). + :type endpoint: object + :param authentication_type: Required. The authentication type to use. Possible values include: + "Basic", "OAuth2". 
+ :type authentication_type: str or ~azure.synapse.artifacts.models.ServiceNowAuthenticationType + :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 + authentication. :type username: object - :param password: The password for Basic authentication. + :param password: The password corresponding to the user name for Basic and OAuth2 + authentication. :type password: ~azure.synapse.artifacts.models.SecretBase + :param client_id: The client id for OAuth2 authentication. + :type client_id: object + :param client_secret: The client secret for OAuth2 authentication. + :type client_secret: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, + 'endpoint': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -18027,16 +29526,23 @@ class SapCloudForCustomerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - url: object, + endpoint: object, + authentication_type: Union[str, "ServiceNowAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -18044,19 +29550,30 @@ def __init__( annotations: Optional[List[object]] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, + client_id: Optional[object] = None, + client_secret: Optional["SecretBase"] = None, + 
use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapCloudForCustomer' - self.url = url + super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'ServiceNow' # type: str + self.endpoint = endpoint + self.authentication_type = authentication_type self.username = username self.password = password + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential -class SapCloudForCustomerResourceDataset(Dataset): - """The path of the SAP Cloud for Customer OData entity. +class ServiceNowObjectDataset(Dataset): + """ServiceNow server dataset. All required parameters must be populated in order to send to Azure. @@ -18081,16 +29598,14 @@ class SapCloudForCustomerResourceDataset(Dataset): :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or - Expression with resultType string). - :type path: object + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table_name: The table name. Type: string (or Expression with resultType string). 
+ :type table_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'path': {'required': True}, } _attribute_map = { @@ -18103,14 +29618,13 @@ class SapCloudForCustomerResourceDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", - path: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -18118,316 +29632,238 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapCloudForCustomerResource' - self.path = path + super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ServiceNowObject' # type: str + self.table_name = table_name -class SapEccLinkedService(LinkedService): - """Linked service for SAP ERP Central Component(SAP ECC). +class ServiceNowSource(TabularSource): + """A copy activity ServiceNow server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param url: Required. The URL of SAP ECC OData API. For example, - '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with - resultType string). - :type url: str - :param username: The username for Basic authentication. Type: string (or Expression with - resultType string). - :type username: str - :param password: The password for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). 
- :type encrypted_credential: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object """ _validation = { 'type': {'required': True}, - 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'url': {'key': 'typeProperties.url', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'str'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - url: str, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - username: Optional[str] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[str] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapEcc' - self.url = url - self.username = username - self.password = password - self.encrypted_credential = encrypted_credential + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ServiceNowSource' # type: str + self.query = query -class SapEccResourceDataset(Dataset): - """The path of the SAP ECC OData entity. +class SetVariableActivity(Activity): + """Set value for a Variable. All required parameters must be populated in order to send to Azure. 
:param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. :type type: str - :param description: Dataset description. + :param description: Activity description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with - resultType string). - :type path: object + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param variable_name: Name of the variable whose value needs to be set. + :type variable_name: str + :param value: Value to be set. Could be a static value or Expression. 
+ :type value: object """ _validation = { + 'name': {'required': True}, 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'path': {'key': 'typeProperties.path', 'type': 'object'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - path: object, + name: str, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + variable_name: Optional[str] = None, + value: Optional[object] = None, **kwargs ): - super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapEccResource' - self.path = path + super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'SetVariable' # type: str + self.variable_name = variable_name + self.value = value -class SapHanaLinkedService(LinkedService): - """SAP HANA Linked Service. +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset storage location.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param server: Required. 
Host name of the SAP HANA server. Type: string (or Expression with - resultType string). - :type server: object - :param authentication_type: The authentication type to be used to connect to the SAP HANA - server. Possible values include: "Basic", "Windows". - :type authentication_type: str or ~azure.synapse.artifacts.models.SapHanaAuthenticationType - :param user_name: Username to access the SAP HANA server. Type: string (or Expression with - resultType string). - :type user_name: object - :param password: Password to access the SAP HANA server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). + :type file_name: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, } def __init__( self, *, - server: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - connection_string: Optional[object] = None, - authentication_type: Optional[Union[str, "SapHanaAuthenticationType"]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, **kwargs ): - super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapHana' - self.connection_string = connection_string - self.server = server - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + super(SftpLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'SftpLocation' # type: str -class SapHanaTableDataset(Dataset): - """SAP HANA Table properties. 
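(A minimal usage sketch of the regenerated SftpLocation model above, assuming only the public azure.synapse.artifacts.models namespace; the folder and file values are hypothetical illustration values, not taken from the diff.)

# Sketch: constructing the regenerated SftpLocation model.
from azure.synapse.artifacts.models import SftpLocation

location = SftpLocation(
    folder_path="inbound/daily",  # hypothetical folder on the SFTP server
    file_name="orders.csv",       # hypothetical file name
)
assert location.type == "SftpLocation"  # discriminator constant filled by the model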
+class SftpReadSettings(StoreReadSettings): + """Sftp read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. The read setting type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param schema_type_properties_schema: The schema name of SAP HANA. Type: string (or Expression + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with + resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType + string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). - :type schema_type_properties_schema: object - :param table: The table name of SAP HANA. Type: string (or Expression with resultType string). 
- :type table: object + :type modified_datetime_end: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - schema_type_properties_schema: Optional[object] = None, - table: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, **kwargs ): - super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapHanaTable' - self.schema_type_properties_schema = schema_type_properties_schema - self.table = table + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SftpReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end -class SapOpenHubLinkedService(LinkedService): - """SAP Business Warehouse Open Hub Destination Linked Service. +class SftpServerLinkedService(LinkedService): + """A linked service for an SSH File Transfer Protocol (SFTP) server. All required parameters must be populated in order to send to Azure. 
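(A matching sketch for the SftpReadSettings model defined above; the wildcard and connection values are hypothetical.)

# Sketch: read recursively under the dataset's folder path, filtering files by
# wildcard, per the recursive/wildcardFileName properties documented above.
from azure.synapse.artifacts.models import SftpReadSettings

read_settings = SftpReadSettings(
    recursive=True,                 # walk sub-folders under the folder path
    wildcard_file_name="*.csv",     # hypothetical filename filter
    max_concurrent_connections=4,   # hypothetical cap on concurrent connections
)
assert read_settings.type == "SftpReadSettings"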
@@ -18444,37 +29880,48 @@ class SapOpenHubLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Host name of the SAP BW instance where the open hub destination is - located. Type: string (or Expression with resultType string). - :type server: object - :param system_number: Required. System number of the BW system where the open hub destination - is located. (Usually a two-digit decimal number represented as a string.) Type: string (or - Expression with resultType string). - :type system_number: object - :param client_id: Required. Client ID of the client on the BW system where the open hub - destination is located. (Usually a three-digit decimal number represented as a string) Type: - string (or Expression with resultType string). - :type client_id: object - :param language: Language of the BW system where the open hub destination is located. The - default value is EN. Type: string (or Expression with resultType string). - :type language: object - :param user_name: Username to access the SAP BW server where the open hub destination is - located. Type: string (or Expression with resultType string). + :param host: Required. The SFTP server host name. Type: string (or Expression with resultType + string). + :type host: object + :param port: The TCP port number that the SFTP server uses to listen for client connections. + Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param authentication_type: The authentication type to be used to connect to the SFTP server. + Possible values include: "Basic", "SshPublicKey". + :type authentication_type: str or ~azure.synapse.artifacts.models.SftpAuthenticationType + :param user_name: The username used to log on to the SFTP server. Type: string (or Expression + with resultType string). :type user_name: object - :param password: Password to access the SAP BW server where the open hub destination is - located. + :param password: Password to log on to the SFTP server for Basic authentication. :type password: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only + valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either + PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH + format. Type: string (or Expression with resultType string). + :type private_key_path: object + :param private_key_content: Base64 encoded SSH private key content for SshPublicKey + authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or + PrivateKeyContent should be specified. SSH private key should be OpenSSH format. + :type private_key_content: ~azure.synapse.artifacts.models.SecretBase + :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is + encrypted. + :type pass_phrase: ~azure.synapse.artifacts.models.SecretBase + :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is + false.
Type: boolean (or Expression with resultType boolean). + :type skip_host_key_validation: object + :param host_key_fingerprint: The host key fingerprint of the SFTP server. When + SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or + Expression with resultType string). + :type host_key_fingerprint: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'system_number': {'required': True}, - 'client_id': {'required': True}, + 'host': {'required': True}, } _attribute_map = { @@ -18484,190 +29931,134 @@ class SapOpenHubLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, + 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, + 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, + 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, + 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, } def __init__( self, *, - server: object, - system_number: object, - client_id: object, + host: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - language: Optional[object] = None, + port: Optional[object] = None, + authentication_type: Optional[Union[str, "SftpAuthenticationType"]] = None, user_name: Optional[object] = None, password: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, + private_key_path: Optional[object] = None, + private_key_content: Optional["SecretBase"] = None, + pass_phrase: Optional["SecretBase"] = None, + skip_host_key_validation: Optional[object] = None, + host_key_fingerprint: Optional[object] = None, **kwargs ): - super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapOpenHub' - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language + super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Sftp' # type: str + self.host = host + self.port = port + self.authentication_type = authentication_type self.user_name = user_name
self.password = password self.encrypted_credential = encrypted_credential + self.private_key_path = private_key_path + self.private_key_content = private_key_content + self.pass_phrase = pass_phrase + self.skip_host_key_validation = skip_host_key_validation + self.host_key_fingerprint = host_key_fingerprint -class SapOpenHubTableDataset(Dataset): - """Sap Business Warehouse Open Hub Destination Table properties. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param open_hub_destination_name: Required. The name of the Open Hub Destination with - destination type as Database Table. Type: string (or Expression with resultType string). - :type open_hub_destination_name: object - :param exclude_last_request: Whether to exclude the records of the last request. The default - value is true. Type: boolean (or Expression with resultType boolean). - :type exclude_last_request: object - :param base_request_id: The ID of request for delta loading. Once it is set, only data with - requestId larger than the value of this property will be retrieved. The default value is 0. - Type: integer (or Expression with resultType integer ). 
- :type base_request_id: object - """ - - _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, - 'open_hub_destination_name': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, - 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, - 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, - } - - def __init__( - self, - *, - linked_service_name: "LinkedServiceReference", - open_hub_destination_name: object, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - exclude_last_request: Optional[object] = None, - base_request_id: Optional[object] = None, - **kwargs - ): - super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapOpenHubTable' - self.open_hub_destination_name = open_hub_destination_name - self.exclude_last_request = exclude_last_request - self.base_request_id = base_request_id - - -class SapTableLinkedService(LinkedService): - """SAP Table Linked Service. +class SftpWriteSettings(StoreWriteSettings): + """Sftp write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. The write setting type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param server: Host name of the SAP instance where the table is located. Type: string (or - Expression with resultType string). - :type server: object - :param system_number: System number of the SAP system where the table is located. (Usually a - two-digit decimal number represented as a string.) Type: string (or Expression with resultType - string). - :type system_number: object - :param client_id: Client ID of the client on the SAP system where the table is located. 
- (Usually a three-digit decimal number represented as a string) Type: string (or Expression with - resultType string). - :type client_id: object - :param language: Language of the SAP system where the table is located. The default value is - EN. Type: string (or Expression with resultType string). - :type language: object - :param system_id: SystemID of the SAP system where the table is located. Type: string (or - Expression with resultType string). - :type system_id: object - :param user_name: Username to access the SAP server where the table is located. Type: string - (or Expression with resultType string). - :type user_name: object - :param password: Password to access the SAP server where the table is located. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with - resultType string). - :type message_server: object - :param message_server_service: The service name or port number of the Message Server. Type: - string (or Expression with resultType string). - :type message_server_service: object - :param snc_mode: SNC activation indicator to access the SAP server where the table is located. - Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - :type snc_mode: object - :param snc_my_name: Initiator's SNC name to access the SAP server where the table is located. - Type: string (or Expression with resultType string). - :type snc_my_name: object - :param snc_partner_name: Communication partner's SNC name to access the SAP server where the - table is located. Type: string (or Expression with resultType string). - :type snc_partner_name: object - :param snc_library_path: External security product's library to access the SAP server where the - table is located. Type: string (or Expression with resultType string). - :type snc_library_path: object - :param snc_qop: SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string - (or Expression with resultType string). - :type snc_qop: object - :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with - resultType string). - :type logon_group: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param operation_timeout: Specifies the timeout for writing each chunk to the SFTP server. + Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string).
+ :type operation_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + operation_timeout: Optional[object] = None, + **kwargs + ): + super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'SftpWriteSettings' # type: str + self.operation_timeout = operation_timeout + + +class ShopifyLinkedService(LinkedService): + """Shopify Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. The endpoint of the Shopify server. (e.g. mystore.myshopify.com). + :type host: object + :param access_token: The API access token that can be used to access Shopify’s data. The token + won't expire if it is in offline mode. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using + HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name in the server's + certificate to match the host name of the server when connecting over SSL. The default value is + true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of the server when + connecting over SSL. The default value is true. + :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
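(A matching sketch for the SftpWriteSettings model defined above; the timeout override is hypothetical, while the 01:00:00 default comes from the docstring.)

# Sketch: tighten the per-chunk write timeout when copying to an SFTP sink.
from azure.synapse.artifacts.models import SftpWriteSettings

write_settings = SftpWriteSettings(
    operation_timeout="00:30:00",   # hypothetical 30-minute per-chunk timeout
    max_concurrent_connections=2,   # hypothetical connection cap
)
assert write_settings.type == "SftpWriteSettings"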
@@ -18676,6 +30067,7 @@ class SapTableLinkedService(LinkedService): _validation = { 'type': {'required': True}, + 'host': {'required': True}, } _attribute_map = { @@ -18685,72 +30077,42 @@ class SapTableLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'server': {'key': 'typeProperties.server', 'type': 'object'}, - 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'language': {'key': 'typeProperties.language', 'type': 'object'}, - 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, - 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, - 'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'}, - 'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'}, - 'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'}, - 'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'}, - 'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'}, - 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, + host: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - server: Optional[object] = None, - system_number: Optional[object] = None, - client_id: Optional[object] = None, - language: Optional[object] = None, - system_id: Optional[object] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - message_server: Optional[object] = None, - message_server_service: Optional[object] = None, - snc_mode: Optional[object] = None, - snc_my_name: Optional[object] = None, - snc_partner_name: Optional[object] = None, - snc_library_path: Optional[object] = None, - snc_qop: Optional[object] = None, - logon_group: Optional[object] = None, + access_token: Optional["SecretBase"] = None, + use_encrypted_endpoints: Optional[object] = None, + use_host_verification: Optional[object] = None, + use_peer_verification: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SapTable' - self.server = server - self.system_number = system_number - self.client_id = client_id - self.language = language - 
self.system_id = system_id - self.user_name = user_name - self.password = password - self.message_server = message_server - self.message_server_service = message_server_service - self.snc_mode = snc_mode - self.snc_my_name = snc_my_name - self.snc_partner_name = snc_partner_name - self.snc_library_path = snc_library_path - self.snc_qop = snc_qop - self.logon_group = logon_group + super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Shopify' # type: str + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential -class SapTableResourceDataset(Dataset): - """SAP Table Resource properties. +class ShopifyObjectDataset(Dataset): + """Shopify Service dataset. All required parameters must be populated in order to send to Azure. @@ -18776,15 +30138,13 @@ class SapTableResourceDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: Required. The name of the SAP Table. Type: string (or Expression with - resultType string). + :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, - 'table_name': {'required': True}, } _attribute_map = { @@ -18804,7 +30164,6 @@ def __init__( self, *, linked_service_name: "LinkedServiceReference", - table_name: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -18812,445 +30171,510 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, **kwargs ): - super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SapTableResource' + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ShopifyObject' # type: str self.table_name = table_name -class ScriptAction(msrest.serialization.Model): - """Custom script action to run on HDI ondemand cluster once it's up. +class ShopifySource(TabularSource): + """A copy activity Shopify Service source. All required parameters must be populated in order to send to Azure. - :param name: Required. The user provided name of the script action. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). 
+ :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ShopifySource' # type: str + self.query = query + + +class Sku(msrest.serialization.Model): + """SQL pool SKU. + + :param tier: The service tier. + :type tier: str + :param name: The SKU name. :type name: str - :param uri: Required. The URI for the script action. - :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~azure.synapse.artifacts.models.HdiNodeTypes - :param parameters: The parameters for the script action. - :type parameters: str + :param capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. + :type capacity: int + """ + + _attribute_map = { + 'tier': {'key': 'tier', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + *, + tier: Optional[str] = None, + name: Optional[str] = None, + capacity: Optional[int] = None, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.tier = tier + self.name = name + self.capacity = capacity + + +class SparkBatchJob(msrest.serialization.Model): + """SparkBatchJob. + + All required parameters must be populated in order to send to Azure. + + :param livy_info: + :type livy_info: ~azure.synapse.artifacts.models.SparkBatchJobState + :param name: The batch name. + :type name: str + :param workspace_name: The workspace name. + :type workspace_name: str + :param spark_pool_name: The Spark pool name. + :type spark_pool_name: str + :param submitter_name: The submitter name. 
+ :type submitter_name: str + :param submitter_id: The submitter identifier. + :type submitter_id: str + :param artifact_id: The artifact identifier. + :type artifact_id: str + :param job_type: The job type. Possible values include: "SparkBatch", "SparkSession". + :type job_type: str or ~azure.synapse.artifacts.models.SparkJobType + :param result: The Spark batch job result. Possible values include: "Uncertain", "Succeeded", + "Failed", "Cancelled". + :type result: str or ~azure.synapse.artifacts.models.SparkBatchJobResultType + :param scheduler: The scheduler information. + :type scheduler: ~azure.synapse.artifacts.models.SparkScheduler + :param plugin: The plugin information. + :type plugin: ~azure.synapse.artifacts.models.SparkServicePlugin + :param errors: The error information. + :type errors: list[~azure.synapse.artifacts.models.SparkServiceError] + :param tags: A set of tags. The tags. + :type tags: dict[str, str] + :param id: Required. The session Id. + :type id: int + :param app_id: The application id of this session. + :type app_id: str + :param app_info: The detailed application info. + :type app_info: dict[str, str] + :param state: The batch state. + :type state: str + :param log_lines: The log lines. + :type log_lines: list[str] """ _validation = { - 'name': {'required': True}, - 'uri': {'required': True}, - 'roles': {'required': True}, + 'id': {'required': True}, } _attribute_map = { + 'livy_info': {'key': 'livyInfo', 'type': 'SparkBatchJobState'}, 'name': {'key': 'name', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'roles': {'key': 'roles', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'}, + 'submitter_name': {'key': 'submitterName', 'type': 'str'}, + 'submitter_id': {'key': 'submitterId', 'type': 'str'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'result': {'key': 'result', 'type': 'str'}, + 'scheduler': {'key': 'schedulerInfo', 'type': 'SparkScheduler'}, + 'plugin': {'key': 'pluginInfo', 'type': 'SparkServicePlugin'}, + 'errors': {'key': 'errorInfo', 'type': '[SparkServiceError]'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'id': {'key': 'id', 'type': 'int'}, + 'app_id': {'key': 'appId', 'type': 'str'}, + 'app_info': {'key': 'appInfo', 'type': '{str}'}, + 'state': {'key': 'state', 'type': 'str'}, + 'log_lines': {'key': 'log', 'type': '[str]'}, } def __init__( self, *, - name: str, - uri: str, - roles: Union[str, "HdiNodeTypes"], - parameters: Optional[str] = None, + id: int, + livy_info: Optional["SparkBatchJobState"] = None, + name: Optional[str] = None, + workspace_name: Optional[str] = None, + spark_pool_name: Optional[str] = None, + submitter_name: Optional[str] = None, + submitter_id: Optional[str] = None, + artifact_id: Optional[str] = None, + job_type: Optional[Union[str, "SparkJobType"]] = None, + result: Optional[Union[str, "SparkBatchJobResultType"]] = None, + scheduler: Optional["SparkScheduler"] = None, + plugin: Optional["SparkServicePlugin"] = None, + errors: Optional[List["SparkServiceError"]] = None, + tags: Optional[Dict[str, str]] = None, + app_id: Optional[str] = None, + app_info: Optional[Dict[str, str]] = None, + state: Optional[str] = None, + log_lines: Optional[List[str]] = None, **kwargs ): - super(ScriptAction, self).__init__(**kwargs) + super(SparkBatchJob, self).__init__(**kwargs) + self.livy_info = livy_info self.name = name - 
self.uri = uri - self.roles = roles - self.parameters = parameters - + self.workspace_name = workspace_name + self.spark_pool_name = spark_pool_name + self.submitter_name = submitter_name + self.submitter_id = submitter_id + self.artifact_id = artifact_id + self.job_type = job_type + self.result = result + self.scheduler = scheduler + self.plugin = plugin + self.errors = errors + self.tags = tags + self.id = id + self.app_id = app_id + self.app_info = app_info + self.state = state + self.log_lines = log_lines -class SecureString(SecretBase): - """Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. - All required parameters must be populated in order to send to Azure. +class SparkBatchJobState(msrest.serialization.Model): + """SparkBatchJobState. - :param type: Required. Type of the secret.Constant filled by server. - :type type: str - :param value: Required. Value of secure string. - :type value: str + :param not_started_at: the time at which the "not_started" livy state was first seen. + :type not_started_at: ~datetime.datetime + :param starting_at: the time at which the "starting" livy state was first seen. + :type starting_at: ~datetime.datetime + :param running_at: the time at which the "running" livy state was first seen. + :type running_at: ~datetime.datetime + :param dead_at: the time at which the "dead" livy state was first seen. + :type dead_at: ~datetime.datetime + :param success_at: the time at which the "success" livy state was first seen. + :type success_at: ~datetime.datetime + :param terminated_at: the time at which the "killed" livy state was first seen. + :type terminated_at: ~datetime.datetime + :param recovering_at: the time at which the "recovering" livy state was first seen. + :type recovering_at: ~datetime.datetime + :param current_state: the Spark job state.
+ :type current_state: str + :param job_creation_request: + :type job_creation_request: ~azure.synapse.artifacts.models.SparkRequest """ - _validation = { - 'type': {'required': True}, - 'value': {'required': True}, - } - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, + 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, + 'running_at': {'key': 'runningAt', 'type': 'iso-8601'}, + 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, + 'success_at': {'key': 'successAt', 'type': 'iso-8601'}, + 'terminated_at': {'key': 'killedAt', 'type': 'iso-8601'}, + 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'SparkRequest'}, } def __init__( self, *, - value: str, + not_started_at: Optional[datetime.datetime] = None, + starting_at: Optional[datetime.datetime] = None, + running_at: Optional[datetime.datetime] = None, + dead_at: Optional[datetime.datetime] = None, + success_at: Optional[datetime.datetime] = None, + terminated_at: Optional[datetime.datetime] = None, + recovering_at: Optional[datetime.datetime] = None, + current_state: Optional[str] = None, + job_creation_request: Optional["SparkRequest"] = None, **kwargs ): - super(SecureString, self).__init__(**kwargs) - self.type: str = 'SecureString' - self.value = value + super(SparkBatchJobState, self).__init__(**kwargs) + self.not_started_at = not_started_at + self.starting_at = starting_at + self.running_at = running_at + self.dead_at = dead_at + self.success_at = success_at + self.terminated_at = terminated_at + self.recovering_at = recovering_at + self.current_state = current_state + self.job_creation_request = job_creation_request -class ServiceNowLinkedService(LinkedService): - """ServiceNow server linked service. +class SparkJobDefinition(msrest.serialization.Model): + """Spark job definition. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: The description of the Spark job definition. :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. - :code:``.service-now.com). - :type endpoint: object - :param authentication_type: Required. The authentication type to use. Possible values include: - "Basic", "OAuth2". - :type authentication_type: str or ~azure.synapse.artifacts.models.ServiceNowAuthenticationType - :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 - authentication. - :type username: object - :param password: The password corresponding to the user name for Basic and OAuth2 - authentication. 
- :type password: ~azure.synapse.artifacts.models.SecretBase - :param client_id: The client id for OAuth2 authentication. - :type client_id: object - :param client_secret: The client secret for OAuth2 authentication. - :type client_secret: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :param target_big_data_pool: Required. Big data pool reference. + :type target_big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference + :param required_spark_version: The required Spark version of the application. + :type required_spark_version: str + :param language: The language of the Spark application. + :type language: str + :param job_properties: Required. The properties of the Spark job. + :type job_properties: ~azure.synapse.artifacts.models.SparkJobProperties """ _validation = { - 'type': {'required': True}, - 'endpoint': {'required': True}, - 'authentication_type': {'required': True}, + 'target_big_data_pool': {'required': True}, + 'job_properties': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, - 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'target_big_data_pool': {'key': 'targetBigDataPool', 'type': 'BigDataPoolReference'}, + 'required_spark_version': {'key': 'requiredSparkVersion', 'type': 'str'}, + 'language': {'key': 'language', 'type': 'str'}, + 'job_properties': {'key': 'jobProperties', 'type': 'SparkJobProperties'}, } def __init__( self, *, - endpoint: object, - authentication_type: Union[str, "ServiceNowAuthenticationType"], + target_big_data_pool: "BigDataPoolReference", + job_properties: "SparkJobProperties", additional_properties: 
Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - client_id: Optional[object] = None, - client_secret: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, - encrypted_credential: Optional[object] = None, + required_spark_version: Optional[str] = None, + language: Optional[str] = None, **kwargs ): - super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'ServiceNow' - self.endpoint = endpoint - self.authentication_type = authentication_type - self.username = username - self.password = password - self.client_id = client_id - self.client_secret = client_secret - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification - self.encrypted_credential = encrypted_credential + super(SparkJobDefinition, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.target_big_data_pool = target_big_data_pool + self.required_spark_version = required_spark_version + self.language = language + self.job_properties = job_properties -class ServiceNowObjectDataset(Dataset): - """ServiceNow server dataset. +class SparkJobDefinitionResource(AzureEntityResource): + """Spark job definition resource type. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. - :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param properties: Required. Properties of spark job definition. + :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition """ _validation = { - 'type': {'required': True}, - 'linked_service_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'SparkJobDefinition'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + properties: "SparkJobDefinition", **kwargs ): - super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ServiceNowObject' - self.table_name = table_name + super(SparkJobDefinitionResource, self).__init__(**kwargs) + self.properties = properties -class SetVariableActivity(Activity): - """Set value for a Variable. +class SparkJobDefinitionsListResponse(msrest.serialization.Model): + """A list of spark job definitions resources. All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. - :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param variable_name: Name of the variable whose value needs to be set. 
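A sketch of composing the new SparkJobDefinition and wrapping it in a SparkJobDefinitionResource, using the constructors shown in this diff. SparkJobProperties is detailed further down in this diff; the BigDataPoolReference constructor shape is an assumption based on the other *Reference models in this package.

```python
from azure.synapse.artifacts.models import (
    BigDataPoolReference,
    SparkJobDefinition,
    SparkJobDefinitionResource,
    SparkJobProperties,
)

# Required Spark job settings (see SparkJobProperties later in this diff).
props = SparkJobProperties(
    file="abfss://jobs@mystorage.dfs.core.windows.net/wordcount.jar",
    class_name="com.contoso.WordCount",
    driver_memory="4g",
    driver_cores=2,
    executor_memory="4g",
    executor_cores=2,
    num_executors=2,
)

# Assumption: BigDataPoolReference takes type + reference_name, matching the
# other *Reference models in this package; verify against the generated model.
pool = BigDataPoolReference(type="BigDataPoolReference", reference_name="mysparkpool")

definition = SparkJobDefinition(
    target_big_data_pool=pool,
    job_properties=props,
    language="scala",
)
resource = SparkJobDefinitionResource(properties=definition)
```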
- :type variable_name: str - :param value: Value to be set. Could be a static value or Expression. - :type value: object + :param value: Required. List of spark job definitions. + :type value: list[~azure.synapse.artifacts.models.SparkJobDefinitionResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + 'value': {'required': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + 'value': {'key': 'value', 'type': '[SparkJobDefinitionResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, - name: str, - additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - variable_name: Optional[str] = None, - value: Optional[object] = None, + value: List["SparkJobDefinitionResource"], + next_link: Optional[str] = None, **kwargs ): - super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'SetVariable' - self.variable_name = variable_name + super(SparkJobDefinitionsListResponse, self).__init__(**kwargs) self.value = value + self.next_link = next_link -class SftpServerLinkedService(LinkedService): - """A linked service for an SSH File Transfer Protocol (SFTP) server. +class SparkJobProperties(msrest.serialization.Model): + """The properties of the Spark job. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. The SFTP server host name. Type: string (or Expression with resultType - string). - :type host: object - :param port: The TCP port number that the SFTP server uses to listen for client connections. - Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: object - :param authentication_type: The authentication type to be used to connect to the FTP server. - Possible values include: "Basic", "SshPublicKey". - :type authentication_type: str or ~azure.synapse.artifacts.models.SftpAuthenticationType - :param user_name: The username used to log on to the SFTP server. 
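A sketch of walking the paged listing that SparkJobDefinitionsListResponse models, following next_link until it is empty. `fetch_page` is a hypothetical callable standing in for whatever issues the HTTP request; it is not part of this diff.

```python
from typing import Iterator

from azure.synapse.artifacts.models import (
    SparkJobDefinitionResource,
    SparkJobDefinitionsListResponse,
)

def iter_definitions(fetch_page) -> Iterator[SparkJobDefinitionResource]:
    """Yield every definition across pages. ``fetch_page`` is a hypothetical
    callable mapping a continuation URL (None for the first page) to a
    SparkJobDefinitionsListResponse."""
    next_link = None
    while True:
        page = fetch_page(next_link)
        for item in page.value:  # 'value' is required on every page
            yield item
        next_link = page.next_link
        if not next_link:  # no further pages remain
            break
```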
Type: string (or Expression - with resultType string). - :type user_name: object - :param password: Password to logon the SFTP server for Basic authentication. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object - :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only - valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either - PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH - format. Type: string (or Expression with resultType string). - :type private_key_path: object - :param private_key_content: Base64 encoded SSH private key content for SshPublicKey - authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or - PrivateKeyContent should be specified. SSH private key should be OpenSSH format. - :type private_key_content: ~azure.synapse.artifacts.models.SecretBase - :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is - encrypted. - :type pass_phrase: ~azure.synapse.artifacts.models.SecretBase - :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is - false. Type: boolean (or Expression with resultType boolean). - :type skip_host_key_validation: object - :param host_key_fingerprint: The host key finger-print of the SFTP server. When - SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or - Expression with resultType string). - :type host_key_fingerprint: object + :param name: The name of the job. + :type name: str + :param file: Required. File containing the application to execute. + :type file: str + :param class_name: Main class for Java/Scala application. + :type class_name: str + :param conf: Spark configuration properties. + :type conf: object + :param args: Command line arguments for the application. + :type args: list[str] + :param jars: Jars to be used in this job. + :type jars: list[str] + :param files: files to be used in this job. + :type files: list[str] + :param archives: Archives to be used in this job. + :type archives: list[str] + :param driver_memory: Required. Amount of memory to use for the driver process. + :type driver_memory: str + :param driver_cores: Required. Number of cores to use for the driver. + :type driver_cores: int + :param executor_memory: Required. Amount of memory to use per executor process. + :type executor_memory: str + :param executor_cores: Required. Number of cores to use for each executor. + :type executor_cores: int + :param num_executors: Required. Number of executors to launch for this job. 
+ :type num_executors: int """ _validation = { - 'type': {'required': True}, - 'host': {'required': True}, + 'file': {'required': True}, + 'driver_memory': {'required': True}, + 'driver_cores': {'required': True}, + 'executor_memory': {'required': True}, + 'executor_cores': {'required': True}, + 'num_executors': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, - 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, - 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, - 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, - 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'conf': {'key': 'conf', 'type': 'object'}, + 'args': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, } def __init__( self, *, - host: object, + file: str, + driver_memory: str, + driver_cores: int, + executor_memory: str, + executor_cores: int, + num_executors: int, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - port: Optional[object] = None, - authentication_type: Optional[Union[str, "SftpAuthenticationType"]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - private_key_path: Optional[object] = None, - private_key_content: Optional["SecretBase"] = None, - pass_phrase: Optional["SecretBase"] = None, - skip_host_key_validation: Optional[object] = None, - host_key_fingerprint: Optional[object] = None, + name: Optional[str] = None, + class_name: Optional[str] = None, + conf: Optional[object] = None, + args: Optional[List[str]] = None, + jars: Optional[List[str]] = None, + files: Optional[List[str]] = None, + archives: Optional[List[str]] = None, **kwargs ): - super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, 
connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Sftp' - self.host = host - self.port = port - self.authentication_type = authentication_type - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential - self.private_key_path = private_key_path - self.private_key_content = private_key_content - self.pass_phrase = pass_phrase - self.skip_host_key_validation = skip_host_key_validation - self.host_key_fingerprint = host_key_fingerprint + super(SparkJobProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.file = file + self.class_name = class_name + self.conf = conf + self.args = args + self.jars = jars + self.files = files + self.archives = archives + self.driver_memory = driver_memory + self.driver_cores = driver_cores + self.executor_memory = executor_memory + self.executor_cores = executor_cores + self.num_executors = num_executors -class ShopifyLinkedService(LinkedService): - """Shopify Service linked service. +class SparkLinkedService(LinkedService): + """Spark Server linked service. All required parameters must be populated in order to send to Azure. @@ -19267,21 +30691,45 @@ class ShopifyLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param host: Required. The endpoint of the Shopify server. (i.e. mystore.myshopify.com). + :param host: Required. IP address or host name of the Spark server. :type host: object - :param access_token: The API access token that can be used to access Shopify’s data. The token - won't expire if it is offline mode. - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using - HTTPS. The default value is true. - :type use_encrypted_endpoints: object - :param use_host_verification: Specifies whether to require the host name in the server's - certificate to match the host name of the server when connecting over SSL. The default value is - true. - :type use_host_verification: object - :param use_peer_verification: Specifies whether to verify the identity of the server when - connecting over SSL. The default value is true. - :type use_peer_verification: object + :param port: Required. The TCP port that the Spark server uses to listen for client + connections. + :type port: object + :param server_type: The type of Spark server. Possible values include: "SharkServer", + "SharkServer2", "SparkThriftServer". + :type server_type: str or ~azure.synapse.artifacts.models.SparkServerType + :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible + values include: "Binary", "SASL", "HTTP ". + :type thrift_transport_protocol: str or + ~azure.synapse.artifacts.models.SparkThriftTransportProtocol + :param authentication_type: Required. The authentication method used to access the Spark + server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", + "WindowsAzureHDInsightService". + :type authentication_type: str or ~azure.synapse.artifacts.models.SparkAuthenticationType + :param username: The user name that you use to access Spark Server. 
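A sketch of how SparkJobProperties serializes, assuming the standard msrest Model.serialize() behavior: snake_case attributes map to the camelCase wire keys declared in the _attribute_map above.

```python
from azure.synapse.artifacts.models import SparkJobProperties

props = SparkJobProperties(
    file="wordcount.py",
    driver_memory="2g",
    driver_cores=1,
    executor_memory="2g",
    executor_cores=1,
    num_executors=2,
    args=["--input", "abfss://data@mystorage.dfs.core.windows.net/in.txt"],
)

# msrest models serialize snake_case attributes to the camelCase wire keys
# from _attribute_map ('driver_memory' -> 'driverMemory', and so on).
payload = props.serialize()
assert payload["driverMemory"] == "2g"
assert payload["numExecutors"] == 2
```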
+ :type username: object + :param password: The password corresponding to the user name that you provided in the Username + field. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param http_path: The partial URL corresponding to the Spark server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The + default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for + verifying the server when connecting over SSL. This property can only be set when using SSL on + self-hosted IR. The default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust + store or from a specified PEM file. The default value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate + name to match the host name of the server when connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from + the server. The default value is false. + :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -19291,6 +30739,8 @@ class ShopifyLinkedService(LinkedService): _validation = { 'type': {'required': True}, 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, } _attribute_map = { @@ -19301,10 +30751,18 @@ class ShopifyLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, - 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, - 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -19312,30 +30770,46 @@ def __init__( self, *, 
host: object, + port: object, + authentication_type: Union[str, "SparkAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - access_token: Optional["SecretBase"] = None, - use_encrypted_endpoints: Optional[object] = None, - use_host_verification: Optional[object] = None, - use_peer_verification: Optional[object] = None, + server_type: Optional[Union[str, "SparkServerType"]] = None, + thrift_transport_protocol: Optional[Union[str, "SparkThriftTransportProtocol"]] = None, + username: Optional[object] = None, + password: Optional["SecretBase"] = None, + http_path: Optional[object] = None, + enable_ssl: Optional[object] = None, + trusted_cert_path: Optional[object] = None, + use_system_trust_store: Optional[object] = None, + allow_host_name_cn_mismatch: Optional[object] = None, + allow_self_signed_server_cert: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): - super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Shopify' + super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Spark' # type: str self.host = host - self.access_token = access_token - self.use_encrypted_endpoints = use_encrypted_endpoints - self.use_host_verification = use_host_verification - self.use_peer_verification = use_peer_verification + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert self.encrypted_credential = encrypted_credential -class ShopifyObjectDataset(Dataset): - """Shopify Service dataset. +class SparkObjectDataset(Dataset): + """Spark Server dataset. All required parameters must be populated in order to send to Azure. @@ -19361,8 +30835,14 @@ class ShopifyObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). + :param table_name: This property will be retired. Please consider using schema + table + properties instead. :type table_name: object + :param table: The table name of the Spark. Type: string (or Expression with resultType string). + :type table: object + :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression + with resultType string). 
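A minimal sketch for the SparkLinkedService model added above; host, port, and authentication_type are the required parameters, and the enum-typed fields also accept their plain string values.

```python
from azure.synapse.artifacts.models import SparkLinkedService

linked_service = SparkLinkedService(
    host="spark.contoso.com",
    port=10001,
    authentication_type="WindowsAzureHDInsightService",
    enable_ssl=True,  # connections default to unencrypted (false)
)
print(linked_service.type)  # 'Spark' (constant filled by the model)
```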
+ :type schema_type_properties_schema: object """ _validation = { @@ -19381,6 +30861,8 @@ class ShopifyObjectDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__( @@ -19395,881 +30877,874 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, table_name: Optional[object] = None, + table: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ShopifyObject' + super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SparkObject' # type: str self.table_name = table_name + self.table = table + self.schema_type_properties_schema = schema_type_properties_schema + + +class SparkRequest(msrest.serialization.Model): + """SparkRequest. + + :param name: + :type name: str + :param file: + :type file: str + :param class_name: + :type class_name: str + :param arguments: + :type arguments: list[str] + :param jars: + :type jars: list[str] + :param python_files: + :type python_files: list[str] + :param files: + :type files: list[str] + :param archives: + :type archives: list[str] + :param configuration: Dictionary of :code:``. 
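A sketch for the SparkObjectDataset changes above, preferring the new table + schema properties over the retiring table_name. The LinkedServiceReference constructor shape is an assumption based on the other *Reference models in this package.

```python
from azure.synapse.artifacts.models import LinkedServiceReference, SparkObjectDataset

# table_name is being retired in favour of table + schema_type_properties_schema.
dataset = SparkObjectDataset(
    # Assumption: LinkedServiceReference takes reference_name, as elsewhere
    # in this package; verify against the generated model.
    linked_service_name=LinkedServiceReference(reference_name="MySparkLinkedService"),
    table="sales",
    schema_type_properties_schema="dbo",
)
```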
+ :type configuration: dict[str, str] + :param driver_memory: + :type driver_memory: str + :param driver_cores: + :type driver_cores: int + :param executor_memory: + :type executor_memory: str + :param executor_cores: + :type executor_cores: int + :param executor_count: + :type executor_count: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'arguments': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'python_files': {'key': 'pyFiles', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'configuration': {'key': 'conf', 'type': '{str}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'executor_count': {'key': 'numExecutors', 'type': 'int'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + file: Optional[str] = None, + class_name: Optional[str] = None, + arguments: Optional[List[str]] = None, + jars: Optional[List[str]] = None, + python_files: Optional[List[str]] = None, + files: Optional[List[str]] = None, + archives: Optional[List[str]] = None, + configuration: Optional[Dict[str, str]] = None, + driver_memory: Optional[str] = None, + driver_cores: Optional[int] = None, + executor_memory: Optional[str] = None, + executor_cores: Optional[int] = None, + executor_count: Optional[int] = None, + **kwargs + ): + super(SparkRequest, self).__init__(**kwargs) + self.name = name + self.file = file + self.class_name = class_name + self.arguments = arguments + self.jars = jars + self.python_files = python_files + self.files = files + self.archives = archives + self.configuration = configuration + self.driver_memory = driver_memory + self.driver_cores = driver_cores + self.executor_memory = executor_memory + self.executor_cores = executor_cores + self.executor_count = executor_count + + +class SparkScheduler(msrest.serialization.Model): + """SparkScheduler. + :param submitted_at: + :type submitted_at: ~datetime.datetime + :param scheduled_at: + :type scheduled_at: ~datetime.datetime + :param ended_at: + :type ended_at: ~datetime.datetime + :param cancellation_requested_at: + :type cancellation_requested_at: ~datetime.datetime + :param current_state: Possible values include: "Queued", "Scheduled", "Ended". 
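A sketch for the new SparkRequest model, highlighting the renamed wire keys in its _attribute_map; serialization assumes the standard msrest Model.serialize() behavior.

```python
from azure.synapse.artifacts.models import SparkRequest

request = SparkRequest(
    file="abfss://jobs@mystorage.dfs.core.windows.net/job.py",
    arguments=["--date", "2020-09-15"],
    python_files=["helpers.py"],
    configuration={"spark.dynamicAllocation.enabled": "false"},
    executor_count=2,
)

# Note the renamed wire keys: 'arguments' -> 'args', 'python_files' ->
# 'pyFiles', 'configuration' -> 'conf', 'executor_count' -> 'numExecutors'.
payload = request.serialize()
assert "pyFiles" in payload and "numExecutors" in payload
```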
+ :type current_state: str or ~azure.synapse.artifacts.models.SchedulerCurrentState + """ + + _attribute_map = { + 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, + 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, + 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, + 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + *, + submitted_at: Optional[datetime.datetime] = None, + scheduled_at: Optional[datetime.datetime] = None, + ended_at: Optional[datetime.datetime] = None, + cancellation_requested_at: Optional[datetime.datetime] = None, + current_state: Optional[Union[str, "SchedulerCurrentState"]] = None, + **kwargs + ): + super(SparkScheduler, self).__init__(**kwargs) + self.submitted_at = submitted_at + self.scheduled_at = scheduled_at + self.ended_at = ended_at + self.cancellation_requested_at = cancellation_requested_at + self.current_state = current_state -class SparkBatchJob(msrest.serialization.Model): - """SparkBatchJob. - All required parameters must be populated in order to send to Azure. +class SparkServiceError(msrest.serialization.Model): + """SparkServiceError. - :param livy_info: - :type livy_info: ~azure.synapse.artifacts.models.SparkBatchJobState - :param name: The batch name. - :type name: str - :param workspace_name: The workspace name. - :type workspace_name: str - :param spark_pool_name: The Spark pool name. - :type spark_pool_name: str - :param submitter_name: The submitter name. - :type submitter_name: str - :param submitter_id: The submitter identifier. - :type submitter_id: str - :param artifact_id: The artifact identifier. - :type artifact_id: str - :param job_type: The job type. Possible values include: "SparkBatch", "SparkSession". - :type job_type: str or ~azure.synapse.artifacts.models.SparkJobType - :param result: The Spark batch job result. Possible values include: "Uncertain", "Succeeded", - "Failed", "Cancelled". - :type result: str or ~azure.synapse.artifacts.models.SparkBatchJobResultType - :param scheduler: The scheduler information. - :type scheduler: ~azure.synapse.artifacts.models.SparkScheduler - :param plugin: The plugin information. - :type plugin: ~azure.synapse.artifacts.models.SparkServicePlugin - :param errors: The error information. - :type errors: list[~azure.synapse.artifacts.models.SparkServiceError] - :param tags: A set of tags. The tags. - :type tags: dict[str, str] - :param id: Required. The session Id. - :type id: int - :param app_id: The application id of this session. - :type app_id: str - :param app_info: The detailed application info. - :type app_info: dict[str, str] - :param state: The batch state. - :type state: str - :param log_lines: The log lines. - :type log_lines: list[str] + :param message: + :type message: str + :param error_code: + :type error_code: str + :param source: Possible values include: "System", "User", "Unknown", "Dependency". 
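A short sketch for the SparkScheduler model added above; current_state accepts the SchedulerCurrentState enum or its string value, and the timestamps here are illustrative.

```python
import datetime

from azure.synapse.artifacts.models import SparkScheduler

scheduler = SparkScheduler(
    submitted_at=datetime.datetime(2020, 9, 15, 9, 59, 50),
    scheduled_at=datetime.datetime(2020, 9, 15, 10, 0, 0),
    current_state="Scheduled",  # "Queued", "Scheduled", or "Ended"
)
if scheduler.current_state == "Ended":
    print("ended at", scheduler.ended_at)
```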
+ :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource """ - _validation = { - 'id': {'required': True}, - } - _attribute_map = { - 'livy_info': {'key': 'livyInfo', 'type': 'SparkBatchJobState'}, - 'name': {'key': 'name', 'type': 'str'}, - 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, - 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'}, - 'submitter_name': {'key': 'submitterName', 'type': 'str'}, - 'submitter_id': {'key': 'submitterId', 'type': 'str'}, - 'artifact_id': {'key': 'artifactId', 'type': 'str'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'result': {'key': 'result', 'type': 'str'}, - 'scheduler': {'key': 'schedulerInfo', 'type': 'SparkScheduler'}, - 'plugin': {'key': 'pluginInfo', 'type': 'SparkServicePlugin'}, - 'errors': {'key': 'errorInfo', 'type': '[SparkServiceError]'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'id': {'key': 'id', 'type': 'int'}, - 'app_id': {'key': 'appId', 'type': 'str'}, - 'app_info': {'key': 'appInfo', 'type': '{str}'}, - 'state': {'key': 'state', 'type': 'str'}, - 'log_lines': {'key': 'log', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'source': {'key': 'source', 'type': 'str'}, } def __init__( self, *, - id: int, - livy_info: Optional["SparkBatchJobState"] = None, - name: Optional[str] = None, - workspace_name: Optional[str] = None, - spark_pool_name: Optional[str] = None, - submitter_name: Optional[str] = None, - submitter_id: Optional[str] = None, - artifact_id: Optional[str] = None, - job_type: Optional[Union[str, "SparkJobType"]] = None, - result: Optional[Union[str, "SparkBatchJobResultType"]] = None, - scheduler: Optional["SparkScheduler"] = None, - plugin: Optional["SparkServicePlugin"] = None, - errors: Optional[List["SparkServiceError"]] = None, - tags: Optional[Dict[str, str]] = None, - app_id: Optional[str] = None, - app_info: Optional[Dict[str, str]] = None, - state: Optional[str] = None, - log_lines: Optional[List[str]] = None, + message: Optional[str] = None, + error_code: Optional[str] = None, + source: Optional[Union[str, "SparkErrorSource"]] = None, **kwargs ): - super(SparkBatchJob, self).__init__(**kwargs) - self.livy_info = livy_info - self.name = name - self.workspace_name = workspace_name - self.spark_pool_name = spark_pool_name - self.submitter_name = submitter_name - self.submitter_id = submitter_id - self.artifact_id = artifact_id - self.job_type = job_type - self.result = result - self.scheduler = scheduler - self.plugin = plugin - self.errors = errors - self.tags = tags - self.id = id - self.app_id = app_id - self.app_info = app_info - self.state = state - self.log_lines = log_lines + super(SparkServiceError, self).__init__(**kwargs) + self.message = message + self.error_code = error_code + self.source = source -class SparkBatchJobState(msrest.serialization.Model): - """SparkBatchJobState. +class SparkServicePlugin(msrest.serialization.Model): + """SparkServicePlugin. - :param not_started_at: the time that at which "not_started" livy state was first seen. - :type not_started_at: ~datetime.datetime - :param starting_at: the time that at which "starting" livy state was first seen. - :type starting_at: ~datetime.datetime - :param running_at: the time that at which "running" livy state was first seen. - :type running_at: ~datetime.datetime - :param dead_at: time that at which "dead" livy state was first seen. 
- :type dead_at: ~datetime.datetime - :param success_at: the time that at which "success" livy state was first seen. - :type success_at: ~datetime.datetime - :param terminated_at: the time that at which "killed" livy state was first seen. - :type terminated_at: ~datetime.datetime - :param recovering_at: the time that at which "recovering" livy state was first seen. - :type recovering_at: ~datetime.datetime - :param current_state: the Spark job state. - :type current_state: str - :param job_creation_request: - :type job_creation_request: ~azure.synapse.artifacts.models.SparkRequest + :param preparation_started_at: + :type preparation_started_at: ~datetime.datetime + :param resource_acquisition_started_at: + :type resource_acquisition_started_at: ~datetime.datetime + :param submission_started_at: + :type submission_started_at: ~datetime.datetime + :param monitoring_started_at: + :type monitoring_started_at: ~datetime.datetime + :param cleanup_started_at: + :type cleanup_started_at: ~datetime.datetime + :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", + "Submission", "Monitoring", "Cleanup", "Ended". + :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState """ _attribute_map = { - 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, - 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, - 'running_at': {'key': 'runningAt', 'type': 'iso-8601'}, - 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, - 'success_at': {'key': 'successAt', 'type': 'iso-8601'}, - 'terminated_at': {'key': 'killedAt', 'type': 'iso-8601'}, - 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, + 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, + 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, + 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, + 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, 'current_state': {'key': 'currentState', 'type': 'str'}, - 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'SparkRequest'}, } def __init__( self, *, - not_started_at: Optional[datetime.datetime] = None, - starting_at: Optional[datetime.datetime] = None, - running_at: Optional[datetime.datetime] = None, - dead_at: Optional[datetime.datetime] = None, - success_at: Optional[datetime.datetime] = None, - terminated_at: Optional[datetime.datetime] = None, - recovering_at: Optional[datetime.datetime] = None, - current_state: Optional[str] = None, - job_creation_request: Optional["SparkRequest"] = None, + preparation_started_at: Optional[datetime.datetime] = None, + resource_acquisition_started_at: Optional[datetime.datetime] = None, + submission_started_at: Optional[datetime.datetime] = None, + monitoring_started_at: Optional[datetime.datetime] = None, + cleanup_started_at: Optional[datetime.datetime] = None, + current_state: Optional[Union[str, "PluginCurrentState"]] = None, **kwargs ): - super(SparkBatchJobState, self).__init__(**kwargs) - self.not_started_at = not_started_at - self.starting_at = starting_at - self.running_at = running_at - self.dead_at = dead_at - self.success_at = success_at - self.terminated_at = terminated_at - self.recovering_at = recovering_at + super(SparkServicePlugin, self).__init__(**kwargs) + self.preparation_started_at = preparation_started_at + self.resource_acquisition_started_at = 
resource_acquisition_started_at + self.submission_started_at = submission_started_at + self.monitoring_started_at = monitoring_started_at + self.cleanup_started_at = cleanup_started_at self.current_state = current_state - self.job_creation_request = job_creation_request -class SparkJobDefinition(msrest.serialization.Model): - """Spark job definition. +class SparkSource(TabularSource): + """A copy activity Spark Server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param description: The description of the Spark job definition. - :type description: str - :param target_big_data_pool: Required. Big data pool reference. - :type target_big_data_pool: ~azure.synapse.artifacts.models.BigDataPoolReference - :param required_spark_version: The required Spark version of the application. - :type required_spark_version: str - :param language: The language of the Spark application. - :type language: str - :param job_properties: Required. The properties of the Spark job. - :type job_properties: ~azure.synapse.artifacts.models.SparkJobProperties + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object """ _validation = { - 'target_big_data_pool': {'required': True}, - 'job_properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'description': {'key': 'description', 'type': 'str'}, - 'target_big_data_pool': {'key': 'targetBigDataPool', 'type': 'BigDataPoolReference'}, - 'required_spark_version': {'key': 'requiredSparkVersion', 'type': 'str'}, - 'language': {'key': 'language', 'type': 'str'}, - 'job_properties': {'key': 'jobProperties', 'type': 'SparkJobProperties'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__( self, *, - target_big_data_pool: "BigDataPoolReference", - job_properties: "SparkJobProperties", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - required_spark_version: Optional[str] = None, - language: Optional[str] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, **kwargs ): - super(SparkJobDefinition, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.description = description - self.target_big_data_pool = target_big_data_pool - self.required_spark_version = required_spark_version - self.language = language - self.job_properties = job_properties - + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SparkSource' # type: str + self.query = query -class SparkJobDefinitionResource(SubResource): - """Spark job definition resource type. - Variables are only populated by the server, and will be ignored when sending a request. +class SqlConnection(msrest.serialization.Model): + """The connection used to execute the SQL script. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of spark job definition. - :type properties: ~azure.synapse.artifacts.models.SparkJobDefinition + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", + "SqlPool". + :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType + :param name: Required. The identifier of the connection. 
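A sketch for the SparkSource copy-activity source added above; the query and timeout values are illustrative, with query_timeout following the hh:mm:ss pattern from the docstring.

```python
from azure.synapse.artifacts.models import SparkSource

# A tabular copy source reading from a Spark dataset via an explicit query.
source = SparkSource(
    query="SELECT * FROM dbo.sales WHERE year = 2020",
    query_timeout="02:00:00",
    max_concurrent_connections=4,
)
print(source.type)  # 'SparkSource' (constant filled by the model)
```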
+ :type name: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, + 'name': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'SparkJobDefinition'}, + 'name': {'key': 'name', 'type': 'str'}, } def __init__( self, *, - properties: "SparkJobDefinition", + type: Union[str, "SqlConnectionType"], + name: str, + additional_properties: Optional[Dict[str, object]] = None, **kwargs ): - super(SparkJobDefinitionResource, self).__init__(**kwargs) - self.properties = properties + super(SqlConnection, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.name = name -class SparkJobDefinitionsListResponse(msrest.serialization.Model): - """A list of spark job definitions resources. +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. - :param value: Required. List of spark job definitions. - :type value: list[~azure.synapse.artifacts.models.SparkJobDefinitionResource] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL Data Warehouse when + applicable. Type: boolean (or Expression with resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. + :type poly_base_settings: ~azure.synapse.artifacts.models.PolybaseSettings + :param allow_copy_command: Indicates to use Copy Command to copy data into SQL Data Warehouse. + Type: boolean (or Expression with resultType boolean). + :type allow_copy_command: object + :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is + true. 
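A minimal sketch for the SqlConnection model added above; both fields are required, and type accepts either the SqlConnectionType enum or its string value.

```python
from azure.synapse.artifacts.models import SqlConnection

# type is "SqlOnDemand" or "SqlPool"; name identifies the connection.
connection = SqlConnection(type="SqlPool", name="mydedicatedpool")
```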
+ :type copy_command_settings: ~azure.synapse.artifacts.models.DWCopyCommandSettings + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object """ _validation = { - 'value': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[SparkJobDefinitionResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - value: List["SparkJobDefinitionResource"], - next_link: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + allow_poly_base: Optional[object] = None, + poly_base_settings: Optional["PolybaseSettings"] = None, + allow_copy_command: Optional[object] = None, + copy_command_settings: Optional["DWCopyCommandSettings"] = None, + table_option: Optional[object] = None, **kwargs ): - super(SparkJobDefinitionsListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlDWSink' # type: str + self.pre_copy_script = pre_copy_script + self.allow_poly_base = allow_poly_base + self.poly_base_settings = poly_base_settings + self.allow_copy_command = allow_copy_command + self.copy_command_settings = copy_command_settings + self.table_option = table_option -class SparkJobProperties(msrest.serialization.Model): - """The properties of the Spark job. +class SqlDWSource(TabularSource): + """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: The name of the job. - :type name: str - :param file: Required. File containing the application to execute. - :type file: str - :param class_name: Main class for Java/Scala application. - :type class_name: str - :param conf: Spark configuration properties. 
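A sketch for the SqlDWSink added above, enabling PolyBase and table auto-creation. The PolybaseSettings constructor shape (reject_type/reject_value) is an assumption carried over from the equivalent Data Factory model; verify against the generated code.

```python
from azure.synapse.artifacts.models import PolybaseSettings, SqlDWSink

sink = SqlDWSink(
    allow_poly_base=True,
    # Assumed PolybaseSettings parameters; confirm against the generated model.
    poly_base_settings=PolybaseSettings(reject_type="value", reject_value=0),
    table_option="autoCreate",  # the only supported value today, per the docstring
    pre_copy_script="TRUNCATE TABLE dbo.staging_sales",
)
```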
- :type conf: object - :param args: Command line arguments for the application. - :type args: list[str] - :param jars: Jars to be used in this job. - :type jars: list[str] - :param files: files to be used in this job. - :type files: list[str] - :param archives: Archives to be used in this job. - :type archives: list[str] - :param driver_memory: Required. Amount of memory to use for the driver process. - :type driver_memory: str - :param driver_cores: Required. Number of cores to use for the driver. - :type driver_cores: int - :param executor_memory: Required. Amount of memory to use per executor process. - :type executor_memory: str - :param executor_cores: Required. Number of cores to use for each executor. - :type executor_cores: int - :param num_executors: Required. Number of executors to launch for this job. - :type num_executors: int + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with + resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Data Warehouse + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType + object), itemType: StoredProcedureParameter. 
+ :type stored_procedure_parameters: object """ _validation = { - 'file': {'required': True}, - 'driver_memory': {'required': True}, - 'driver_cores': {'required': True}, - 'executor_memory': {'required': True}, - 'executor_cores': {'required': True}, - 'num_executors': {'required': True}, + 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, - 'conf': {'key': 'conf', 'type': 'object'}, - 'args': {'key': 'args', 'type': '[str]'}, - 'jars': {'key': 'jars', 'type': '[str]'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } def __init__( self, *, - file: str, - driver_memory: str, - driver_cores: int, - executor_memory: str, - executor_cores: int, - num_executors: int, additional_properties: Optional[Dict[str, object]] = None, - name: Optional[str] = None, - class_name: Optional[str] = None, - conf: Optional[object] = None, - args: Optional[List[str]] = None, - jars: Optional[List[str]] = None, - files: Optional[List[str]] = None, - archives: Optional[List[str]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[object] = None, **kwargs ): - super(SparkJobProperties, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.name = name - self.file = file - self.class_name = class_name - self.conf = conf - self.args = args - self.jars = jars - self.files = files - self.archives = archives - self.driver_memory = driver_memory - self.driver_cores = driver_cores - self.executor_memory = executor_memory - self.executor_cores = executor_cores - self.num_executors = num_executors + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlDWSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters -class SparkLinkedService(LinkedService): - """Spark Server linked service. 
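(A minimal usage sketch for the SqlDWSource/SqlDWSink pair regenerated in this hunk; it assumes only the keyword-only constructors shown here, the import path is inferred from the ~azure.synapse.artifacts.models references in the docstrings, and all values are illustrative.)

from azure.synapse.artifacts.models import SqlDWSink, SqlDWSource

# Source side: either a reader query or a stored procedure name, not both.
source = SqlDWSource(
    sql_reader_query="SELECT * FROM dbo.Staging",  # illustrative query
    query_timeout="02:00:00",  # matches the ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])) pattern
)

# Sink side: PolyBase may be enabled where applicable; per the docstring above,
# only 'autoCreate' is currently supported for table_option.
sink = SqlDWSink(
    allow_poly_base=True,
    table_option="autoCreate",
    pre_copy_script="TRUNCATE TABLE dbo.Target",  # illustrative pre-copy script
)

# The type discriminators are constants filled in by each __init__.
assert source.type == "SqlDWSource" and sink.type == "SqlDWSink"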
+class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy sink type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. IP address or host name of the Spark server. - :type host: object - :param port: Required. The TCP port that the Spark server uses to listen for client - connections. - :type port: object - :param server_type: The type of Spark server. Possible values include: "SharkServer", - "SharkServer2", "SparkThriftServer". - :type server_type: str or ~azure.synapse.artifacts.models.SparkServerType - :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible - values include: "Binary", "SASL", "HTTP ". - :type thrift_transport_protocol: str or - ~azure.synapse.artifacts.models.SparkThriftTransportProtocol - :param authentication_type: Required. The authentication method used to access the Spark - server. Possible values include: "Anonymous", "Username", "UsernameAndPassword", - "WindowsAzureHDInsightService". - :type authentication_type: str or ~azure.synapse.artifacts.models.SparkAuthenticationType - :param username: The user name that you use to access Spark Server. - :type username: object - :param password: The password corresponding to the user name that you provided in the Username - field. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param http_path: The partial URL corresponding to the Spark server. - :type http_path: object - :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The - default value is false. - :type enable_ssl: object - :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :type trusted_cert_path: object - :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :type use_system_trust_store: object - :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate - name to match the host name of the server when connecting over SSL. The default value is false. - :type allow_host_name_cn_mismatch: object - :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from - the server. The default value is false. - :type allow_self_signed_server_cert: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. 
Type: string (or Expression with + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
+ :type table_option: object """ _validation = { 'type': {'required': True}, - 'host': {'required': True}, - 'port': {'required': True}, - 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'port': {'key': 'typeProperties.port', 'type': 'object'}, - 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, - 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, - 'username': {'key': 'typeProperties.username', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, - 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, - 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, - 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, - 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, - 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - host: object, - port: object, - authentication_type: Union[str, "SparkAuthenticationType"], additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - server_type: Optional[Union[str, "SparkServerType"]] = None, - thrift_transport_protocol: Optional[Union[str, "SparkThriftTransportProtocol"]] = None, - username: Optional[object] = None, - password: Optional["SecretBase"] = None, - http_path: Optional[object] = None, - enable_ssl: Optional[object] = None, - trusted_cert_path: Optional[object] = None, - use_system_trust_store: Optional[object] = None, - allow_host_name_cn_mismatch: Optional[object] = None, - allow_self_signed_server_cert: Optional[object] = None, - encrypted_credential: Optional[object] = None, + 
write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, **kwargs ): - super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Spark' - self.host = host - self.port = port - self.server_type = server_type - self.thrift_transport_protocol = thrift_transport_protocol - self.authentication_type = authentication_type - self.username = username - self.password = password - self.http_path = http_path - self.enable_ssl = enable_ssl - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store - self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch - self.allow_self_signed_server_cert = allow_self_signed_server_cert - self.encrypted_credential = encrypted_credential + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlMISink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option -class SparkObjectDataset(Dataset): - """Spark Server dataset. +class SqlMISource(TabularSource): + """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of dataset.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: array (or Expression - with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the dataset. Type: array (or - Expression with resultType array), itemType: DatasetSchemaDataElement. - :type schema: object - :param linked_service_name: Required. Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param parameters: Parameters for dataset. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the Dataset. - :type annotations: list[object] - :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the - root level. - :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: This property will be retired. Please consider using schema + table - properties instead. - :type table_name: object - :param table: The table name of the Spark. Type: string (or Expression with resultType string). - :type table: object - :param schema_type_properties_schema: The schema name of the Spark. Type: string (or Expression - with resultType string). - :type schema_type_properties_schema: object + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for an Azure SQL Managed + Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or + Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce.
+ :type produce_additional_types: object """ _validation = { 'type': {'required': True}, - 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'structure': {'key': 'structure', 'type': 'object'}, - 'schema': {'key': 'schema', 'type': 'object'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, - 'table': {'key': 'typeProperties.table', 'type': 'object'}, - 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, *, - linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - structure: Optional[object] = None, - schema: Optional[object] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, - table: Optional[object] = None, - schema_type_properties_schema: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + produce_additional_types: Optional[object] = None, **kwargs ): - super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SparkObject' - self.table_name = table_name - self.table = table - self.schema_type_properties_schema = schema_type_properties_schema - - -class SparkRequest(msrest.serialization.Model): - """SparkRequest. - - :param name: - :type name: str - :param file: - :type file: str - :param class_name: - :type class_name: str - :param arguments: - :type arguments: list[str] - :param jars: - :type jars: list[str] - :param python_files: - :type python_files: list[str] - :param files: - :type files: list[str] - :param archives: - :type archives: list[str] - :param configuration: Dictionary of :code:``. 
- :type configuration: dict[str, str] - :param driver_memory: - :type driver_memory: str - :param driver_cores: - :type driver_cores: int - :param executor_memory: - :type executor_memory: str - :param executor_cores: - :type executor_cores: int - :param executor_count: - :type executor_count: int - """ + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlMISource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, - 'arguments': {'key': 'args', 'type': '[str]'}, - 'jars': {'key': 'jars', 'type': '[str]'}, - 'python_files': {'key': 'pyFiles', 'type': '[str]'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'configuration': {'key': 'conf', 'type': '{str}'}, - 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, - 'driver_cores': {'key': 'driverCores', 'type': 'int'}, - 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, - 'executor_cores': {'key': 'executorCores', 'type': 'int'}, - 'executor_count': {'key': 'numExecutors', 'type': 'int'}, - } - def __init__( - self, - *, - name: Optional[str] = None, - file: Optional[str] = None, - class_name: Optional[str] = None, - arguments: Optional[List[str]] = None, - jars: Optional[List[str]] = None, - python_files: Optional[List[str]] = None, - files: Optional[List[str]] = None, - archives: Optional[List[str]] = None, - configuration: Optional[Dict[str, str]] = None, - driver_memory: Optional[str] = None, - driver_cores: Optional[int] = None, - executor_memory: Optional[str] = None, - executor_cores: Optional[int] = None, - executor_count: Optional[int] = None, - **kwargs - ): - super(SparkRequest, self).__init__(**kwargs) - self.name = name - self.file = file - self.class_name = class_name - self.arguments = arguments - self.jars = jars - self.python_files = python_files - self.files = files - self.archives = archives - self.configuration = configuration - self.driver_memory = driver_memory - self.driver_cores = driver_cores - self.executor_memory = executor_memory - self.executor_cores = executor_cores - self.executor_count = executor_count +class SqlPool(TrackedResource): + """A SQL Analytics pool. + Variables are only populated by the server, and will be ignored when sending a request. -class SparkScheduler(msrest.serialization.Model): - """SparkScheduler. + All required parameters must be populated in order to send to Azure. - :param submitted_at: - :type submitted_at: ~datetime.datetime - :param scheduled_at: - :type scheduled_at: ~datetime.datetime - :param ended_at: - :type ended_at: ~datetime.datetime - :param cancellation_requested_at: - :type cancellation_requested_at: ~datetime.datetime - :param current_state: Possible values include: "Queued", "Scheduled", "Ended". - :type current_state: str or ~azure.synapse.artifacts.models.SchedulerCurrentState + :ivar id: Fully qualified resource Id for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param sku: SQL pool SKU. + :type sku: ~azure.synapse.artifacts.models.Sku + :param max_size_bytes: Maximum size in bytes. + :type max_size_bytes: long + :param collation: Collation mode. + :type collation: str + :param source_database_id: Source database to create from. + :type source_database_id: str + :param recoverable_database_id: Backup database to restore from. + :type recoverable_database_id: str + :param provisioning_state: Resource state. + :type provisioning_state: str + :param status: Resource status. + :type status: str + :param restore_point_in_time: Snapshot time to restore. + :type restore_point_in_time: ~datetime.datetime + :param create_mode: Specifies the mode of SQL pool creation. + :type create_mode: str + :param creation_date: Date the SQL pool was created. + :type creation_date: ~datetime.datetime """ + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + _attribute_map = { - 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, - 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, - 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, - 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'max_size_bytes': {'key': 'properties.maxSizeBytes', 'type': 'long'}, + 'collation': {'key': 'properties.collation', 'type': 'str'}, + 'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'}, + 'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'}, + 'create_mode': {'key': 'properties.createMode', 'type': 'str'}, + 'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'}, } def __init__( self, *, - submitted_at: Optional[datetime.datetime] = None, - scheduled_at: Optional[datetime.datetime] = None, - ended_at: Optional[datetime.datetime] = None, - cancellation_requested_at: Optional[datetime.datetime] = None, - current_state: Optional[Union[str, "SchedulerCurrentState"]] = None, - **kwargs - ): - super(SparkScheduler, self).__init__(**kwargs) - self.submitted_at = submitted_at - self.scheduled_at = scheduled_at - self.ended_at = ended_at - self.cancellation_requested_at = cancellation_requested_at - self.current_state = current_state + location: str, + tags: Optional[Dict[str, str]] = None, + sku: Optional["Sku"] = None, + max_size_bytes: Optional[int] = None, + collation: Optional[str] = None, + source_database_id: Optional[str] = None, + 
recoverable_database_id: Optional[str] = None, + provisioning_state: Optional[str] = None, + status: Optional[str] = None, + restore_point_in_time: Optional[datetime.datetime] = None, + create_mode: Optional[str] = None, + creation_date: Optional[datetime.datetime] = None, + **kwargs + ): + super(SqlPool, self).__init__(tags=tags, location=location, **kwargs) + self.sku = sku + self.max_size_bytes = max_size_bytes + self.collation = collation + self.source_database_id = source_database_id + self.recoverable_database_id = recoverable_database_id + self.provisioning_state = provisioning_state + self.status = status + self.restore_point_in_time = restore_point_in_time + self.create_mode = create_mode + self.creation_date = creation_date -class SparkServiceError(msrest.serialization.Model): - """SparkServiceError. +class SqlPoolInfoListResult(msrest.serialization.Model): + """List of SQL pools. - :param message: - :type message: str - :param error_code: - :type error_code: str - :param source: Possible values include: "System", "User", "Unknown", "Dependency". - :type source: str or ~azure.synapse.artifacts.models.SparkErrorSource + :param next_link: Link to the next page of results. + :type next_link: str + :param value: List of SQL pools. + :type value: list[~azure.synapse.artifacts.models.SqlPool] """ _attribute_map = { - 'message': {'key': 'message', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[SqlPool]'}, } def __init__( self, *, - message: Optional[str] = None, - error_code: Optional[str] = None, - source: Optional[Union[str, "SparkErrorSource"]] = None, + next_link: Optional[str] = None, + value: Optional[List["SqlPool"]] = None, **kwargs ): - super(SparkServiceError, self).__init__(**kwargs) - self.message = message - self.error_code = error_code - self.source = source + super(SqlPoolInfoListResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value -class SparkServicePlugin(msrest.serialization.Model): - """SparkServicePlugin. +class SqlPoolReference(msrest.serialization.Model): + """SQL pool reference type. - :param preparation_started_at: - :type preparation_started_at: ~datetime.datetime - :param resource_acquisition_started_at: - :type resource_acquisition_started_at: ~datetime.datetime - :param submission_started_at: - :type submission_started_at: ~datetime.datetime - :param monitoring_started_at: - :type monitoring_started_at: ~datetime.datetime - :param cleanup_started_at: - :type cleanup_started_at: ~datetime.datetime - :param current_state: Possible values include: "Preparation", "ResourceAcquisition", "Queued", - "Submission", "Monitoring", "Cleanup", "Ended". - :type current_state: str or ~azure.synapse.artifacts.models.PluginCurrentState + All required parameters must be populated in order to send to Azure. + + :param type: Required. SQL pool reference type. Possible values include: "SqlPoolReference". + :type type: str or ~azure.synapse.artifacts.models.SqlPoolReferenceType + :param reference_name: Required. Reference SQL pool name. 
+ :type reference_name: str """ + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + _attribute_map = { - 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, - 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, - 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, - 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, - 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, - 'current_state': {'key': 'currentState', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, } def __init__( self, *, - preparation_started_at: Optional[datetime.datetime] = None, - resource_acquisition_started_at: Optional[datetime.datetime] = None, - submission_started_at: Optional[datetime.datetime] = None, - monitoring_started_at: Optional[datetime.datetime] = None, - cleanup_started_at: Optional[datetime.datetime] = None, - current_state: Optional[Union[str, "PluginCurrentState"]] = None, + type: Union[str, "SqlPoolReferenceType"], + reference_name: str, **kwargs ): - super(SparkServicePlugin, self).__init__(**kwargs) - self.preparation_started_at = preparation_started_at - self.resource_acquisition_started_at = resource_acquisition_started_at - self.submission_started_at = submission_started_at - self.monitoring_started_at = monitoring_started_at - self.cleanup_started_at = cleanup_started_at - self.current_state = current_state + super(SqlPoolReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name -class SqlConnection(msrest.serialization.Model): - """The connection used to execute the SQL script. +class SqlPoolStoredProcedureActivity(Activity): + """Execute SQL pool stored procedure activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. The type of the connection. Possible values include: "SqlOnDemand", - "SqlPool". - :type type: str or ~azure.synapse.artifacts.models.SqlConnectionType - :param name: Required. The identifier of the connection. + :param name: Required. Activity name. :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param sql_pool: Required. SQL pool stored procedure reference. + :type sql_pool: ~azure.synapse.artifacts.models.SqlPoolReference + :param stored_procedure_name: Required. Stored procedure name. Type: string (or Expression with + resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". 
+ :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] """ _validation = { - 'type': {'required': True}, 'name': {'required': True}, + 'type': {'required': True}, + 'sql_pool': {'required': True}, + 'stored_procedure_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'sql_pool': {'key': 'sqlPool', 'type': 'SqlPoolReference'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } def __init__( self, *, - type: Union[str, "SqlConnectionType"], name: str, + sql_pool: "SqlPoolReference", + stored_procedure_name: object, additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, **kwargs ): - super(SqlConnection, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = type - self.name = name + super(SqlPoolStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'SqlPoolStoredProcedure' # type: str + self.sql_pool = sql_pool + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters class SqlScript(msrest.serialization.Model): """SQL script. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -20277,14 +31752,13 @@ class SqlScript(msrest.serialization.Model): :type additional_properties: dict[str, object] :param description: The description of the SQL script. :type description: str - :ivar type: The type of the SQL script. Default value: "SqlQuery". - :vartype type: str + :param type: The type of the SQL script. Possible values include: "SqlQuery". + :type type: str or ~azure.synapse.artifacts.models.SqlScriptType :param content: Required. The content of the SQL script. 
:type content: ~azure.synapse.artifacts.models.SqlScriptContent """ _validation = { - 'type': {'constant': True}, 'content': {'required': True}, } @@ -20295,19 +31769,19 @@ class SqlScript(msrest.serialization.Model): 'content': {'key': 'content', 'type': 'SqlScriptContent'}, } - type = "SqlQuery" - def __init__( self, *, content: "SqlScriptContent", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, + type: Optional[Union[str, "SqlScriptType"]] = None, **kwargs ): super(SqlScript, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description + self.type = type self.content = content @@ -20382,20 +31856,22 @@ def __init__( self.language = language -class SqlScriptResource(SubResource): +class SqlScriptResource(AzureEntityResource): """Sql Script resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of sql script. :type properties: ~azure.synapse.artifacts.models.SqlScript @@ -20439,96 +31915,263 @@ class SqlScriptsListResponse(msrest.serialization.Model): """ _validation = { - 'value': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SqlScriptResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["SqlScriptResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(SqlScriptsListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: string (or Expression + with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. 
+ :type password: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + user_name: Optional[object] = None, + password: Optional["SecretBase"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SqlServer' # type: str + self.connection_string = connection_string + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. 
Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). + :type table_option: object + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[SqlScriptResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__( self, *, - value: List["SqlScriptResource"], - next_link: Optional[str] = None, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, **kwargs ): - super(SqlScriptsListResponse, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlServerSink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option 
= table_option -class SqlServerLinkedService(LinkedService): - """SQL Server linked service. +class SqlServerSource(TabularSource): + """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Copy source type.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param connection_string: Required. The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. - :type connection_string: object - :param user_name: The on-premises Windows authentication user name. Type: string (or Expression + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). - :type user_name: object - :param password: The on-premises Windows authentication password. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object """ _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } def __init__( self, *, - connection_string: object, additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_name: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + produce_additional_types: Optional[object] = None, **kwargs ): - super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'SqlServer' - self.connection_string = connection_string - self.user_name = user_name - self.password = password - self.encrypted_credential = encrypted_credential + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlServerSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types class SqlServerStoredProcedureActivity(ExecutionActivity): @@ -20596,7 +32239,7 @@ def __init__( **kwargs ): super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, 
description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'SqlServerStoredProcedure' + self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -20676,12 +32319,174 @@ def __init__( **kwargs ): super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SqlServerTable' + self.type = 'SqlServerTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or + Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or Expression with + resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure parameter name of the + table type. Type: string (or Expression with resultType string). + :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. For now only + 'autoCreate' value is supported. Type: string (or Expression with resultType string). 
+ :type table_option: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + sql_writer_stored_procedure_name: Optional[object] = None, + sql_writer_table_type: Optional[object] = None, + pre_copy_script: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + stored_procedure_table_type_parameter_name: Optional[object] = None, + table_option: Optional[object] = None, + **kwargs + ): + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SqlSink' # type: str + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option + + +class SqlSource(TabularSource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database + source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression + with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored procedure parameters. + Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.synapse.artifacts.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + sql_reader_query: Optional[object] = None, + sql_reader_stored_procedure_name: Optional[object] = None, + stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + **kwargs + ): + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SqlSource' # type: str + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + + class SquareLinkedService(LinkedService): """Square Service linked service. 
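A minimal usage sketch for the copy-source/sink models added in the hunk above, not part of the generated diff. The keyword names are taken directly from the __init__ signatures shown; StoredProcedureParameter is only referenced here, so its value/type keywords are an assumption based on the surrounding models.

from azure.synapse.artifacts.models import (
    SqlSink,
    SqlSource,
    StoredProcedureParameter,
)

# Read through a stored procedure; sqlReaderQuery and
# sqlReaderStoredProcedureName are mutually exclusive per the docstrings above.
source = SqlSource(
    sql_reader_stored_procedure_name="usp_GetOrders",
    stored_procedure_parameters={
        # value/type keywords assumed; StoredProcedureParameter is not defined in this hunk.
        "Region": StoredProcedureParameter(value="EMEA", type="String"),
    },
    query_timeout="02:00:00",  # timespan pattern from the docstring
)

# Sink that auto-creates the target table ('autoCreate' is the only
# tableOption value currently supported).
sink = SqlSink(
    pre_copy_script="TRUNCATE TABLE dbo.Orders_staging",
    table_option="autoCreate",
    write_batch_size=10000,
)

SqlServerSource takes the same reader keywords plus produce_additional_types.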
@@ -20768,7 +32573,7 @@ def __init__( **kwargs ): super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Square' + self.type = 'Square' # type: str self.host = host self.client_id = client_id self.client_secret = client_secret @@ -20842,9 +32647,66 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SquareObject' - self.table_name = table_name + super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SquareObject' # type: str + self.table_name = table_name + + +class SquareSource(TabularSource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SquareSource' # type: str + self.query = query class SSISAccessCredential(msrest.serialization.Model): @@ -21001,15 +32863,13 @@ def __init__( class SSISLogLocation(msrest.serialization.Model): """SSIS package execution log location. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). :type log_path: object - :ivar type: Required. The type of SSIS log location. Default value: "File". - :vartype type: str + :param type: Required. The type of SSIS log location. Possible values include: "File". + :type type: str or ~azure.synapse.artifacts.models.SsisLogLocationType :param access_credential: The package execution log access credential. :type access_credential: ~azure.synapse.artifacts.models.SSISAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 @@ -21020,7 +32880,7 @@ class SSISLogLocation(msrest.serialization.Model): _validation = { 'log_path': {'required': True}, - 'type': {'required': True, 'constant': True}, + 'type': {'required': True}, } _attribute_map = { @@ -21030,18 +32890,18 @@ class SSISLogLocation(msrest.serialization.Model): 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, } - type = "File" - def __init__( self, *, log_path: object, + type: Union[str, "SsisLogLocationType"], access_credential: Optional["SSISAccessCredential"] = None, log_refresh_interval: Optional[object] = None, **kwargs ): super(SSISLogLocation, self).__init__(**kwargs) self.log_path = log_path + self.type = type self.access_credential = access_credential self.log_refresh_interval = log_refresh_interval @@ -21326,6 +33186,44 @@ def __init__( self.type = type +class SubResource(AzureEntityResource): + """Azure Synapse nested resource, which belongs to a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + + class SwitchActivity(Activity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. @@ -21388,7 +33286,7 @@ def __init__( **kwargs ): super(SwitchActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Switch' + self.type = 'Switch' # type: str self.on = on self.cases = cases self.default_activities = default_activities @@ -21500,7 +33398,7 @@ def __init__( **kwargs ): super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Sybase' + self.type = 'Sybase' # type: str self.server = server self.database = database self.schema = schema @@ -21510,6 +33408,62 @@ def __init__( self.encrypted_credential = encrypted_credential +class SybaseSource(TabularSource): + """A copy activity source for Sybase databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Database query. Type: string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'SybaseSource' # type: str + self.query = query + + class SybaseTableDataset(Dataset): """The Sybase table dataset. @@ -21573,9 +33527,257 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'SybaseTable' - self.table_name = table_name + super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SybaseTable' # type: str + self.table_name = table_name + + +class SynapseNotebookActivity(Activity): + """Execute Synapse notebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param notebook: Required. Synapse notebook reference. + :type notebook: ~azure.synapse.artifacts.models.SynapseNotebookReference + :param parameters: Notebook parameters. 
+ :type parameters: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'notebook': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'notebook': {'key': 'typeProperties.notebook', 'type': 'SynapseNotebookReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + } + + def __init__( + self, + *, + name: str, + notebook: "SynapseNotebookReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + parameters: Optional[Dict[str, object]] = None, + **kwargs + ): + super(SynapseNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'SynapseNotebook' # type: str + self.notebook = notebook + self.parameters = parameters + + +class SynapseNotebookReference(msrest.serialization.Model): + """Synapse notebook reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Synapse notebook reference type. Possible values include: + "NotebookReference". + :type type: str or ~azure.synapse.artifacts.models.NotebookReferenceType + :param reference_name: Required. Reference notebook name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "NotebookReferenceType"], + reference_name: str, + **kwargs + ): + super(SynapseNotebookReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class SynapseSparkJobDefinitionActivity(Activity): + """Execute spark job activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param spark_job: Required. Synapse spark job reference. 
+ :type spark_job: ~azure.synapse.artifacts.models.SynapseSparkJobReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'spark_job': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'spark_job': {'key': 'typeProperties.sparkJob', 'type': 'SynapseSparkJobReference'}, + } + + def __init__( + self, + *, + name: str, + spark_job: "SynapseSparkJobReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + **kwargs + ): + super(SynapseSparkJobDefinitionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'SparkJob' # type: str + self.spark_job = spark_job + + +class SynapseSparkJobReference(msrest.serialization.Model): + """Synapse spark job reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Synapse spark job reference type. Possible values include: + "SparkJobDefinitionReference". + :type type: str or ~azure.synapse.artifacts.models.SparkJobReferenceType + :param reference_name: Required. Reference spark job name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "SparkJobReferenceType"], + reference_name: str, + **kwargs + ): + super(SynapseSparkJobReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy translator type.Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: + MyName" Type: string (or Expression with resultType string). This property will be retired. + Please use mappings property. + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and hierarchical data. + Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": + "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will + be retired. Please use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is going to do cross-apply. + Type: object (or Expression with resultType object). + :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and object) values to simple + strings in json format. Type: boolean (or Expression with resultType boolean). 
+ :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. Tabular->tabular example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + column_mappings: Optional[object] = None, + schema_mapping: Optional[object] = None, + collection_reference: Optional[object] = None, + map_complex_values_to_string: Optional[object] = None, + mappings: Optional[object] = None, + **kwargs + ): + super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TabularTranslator' # type: str + self.column_mappings = column_mappings + self.schema_mapping = schema_mapping + self.collection_reference = collection_reference + self.map_complex_values_to_string = map_complex_values_to_string + self.mappings = mappings class TeradataLinkedService(LinkedService): @@ -21651,7 +33853,7 @@ def __init__( **kwargs ): super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Teradata' + self.type = 'Teradata' # type: str self.connection_string = connection_string self.server = server self.authentication_type = authentication_type @@ -21660,6 +33862,110 @@ def __init__( self.encrypted_credential = encrypted_credential +class TeradataPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for proceeding range or + hash partitioning. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in partitionColumnName that + will be used for proceeding range partitioning. Type: string (or Expression with resultType + string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound + + +class TeradataSource(TabularSource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: Teradata query. Type: string (or Expression with resultType string). + :type query: object + :param partition_option: The partition mechanism that will be used for teradata read in + parallel. Possible values include: "None", "Hash", "DynamicRange". + :type partition_option: str or ~azure.synapse.artifacts.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for teradata source + partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + partition_option: Optional[Union[str, "TeradataPartitionOption"]] = None, + partition_settings: Optional["TeradataPartitionSettings"] = None, + **kwargs + ): + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'TeradataSource' # type: str + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + + class TeradataTableDataset(Dataset): """The Teradata database dataset. @@ -21729,11 +34035,104 @@ def __init__( **kwargs ): super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'TeradataTable' + self.type = 'TeradataTable' # type: str self.database = database self.table = table +class TextFormat(DatasetStorageFormat): + """The data stored in text format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage format.Constant filled by server. + :type type: str + :param serializer: Serializer. Type: string (or Expression with resultType string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with resultType string). + :type deserializer: object + :param column_delimiter: The column delimiter. Type: string (or Expression with resultType + string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). + :type row_delimiter: object + :param escape_char: The escape character. Type: string (or Expression with resultType string). + :type escape_char: object + :param quote_char: The quote character. Type: string (or Expression with resultType string). + :type quote_char: object + :param null_value: The null value string. Type: string (or Expression with resultType string). + :type null_value: object + :param encoding_name: The code page name of the preferred encoding. 
If miss, the default value + is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of + the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param treat_empty_as_null: Treat empty column values in the text file as null. The default + value is true. Type: boolean (or Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The + default value is 0. Type: integer (or Expression with resultType integer). + :type skip_line_count: object + :param first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :type first_row_as_header: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, + 'escape_char': {'key': 'escapeChar', 'type': 'object'}, + 'quote_char': {'key': 'quoteChar', 'type': 'object'}, + 'null_value': {'key': 'nullValue', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + serializer: Optional[object] = None, + deserializer: Optional[object] = None, + column_delimiter: Optional[object] = None, + row_delimiter: Optional[object] = None, + escape_char: Optional[object] = None, + quote_char: Optional[object] = None, + null_value: Optional[object] = None, + encoding_name: Optional[object] = None, + treat_empty_as_null: Optional[object] = None, + skip_line_count: Optional[object] = None, + first_row_as_header: Optional[object] = None, + **kwargs + ): + super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'TextFormat' # type: str + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.escape_char = escape_char + self.quote_char = quote_char + self.null_value = null_value + self.encoding_name = encoding_name + self.treat_empty_as_null = treat_empty_as_null + self.skip_line_count = skip_line_count + self.first_row_as_header = first_row_as_header + + class TriggerDependencyProvisioningStatus(msrest.serialization.Model): """Defines the response of a provision trigger dependency operation. @@ -21767,6 +34166,45 @@ def __init__( self.provisioning_status = provisioning_status +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference. + + All required parameters must be populated in order to send to Azure.
+ + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__( + self, + *, + reference_trigger: "TriggerReference", + **kwargs + ): + super(TriggerDependencyReference, self).__init__(**kwargs) + self.type = 'TriggerDependencyReference' # type: str + self.reference_trigger = reference_trigger + + class TriggerListResponse(msrest.serialization.Model): """A list of trigger resources. @@ -21825,20 +34263,55 @@ def __init__( self.parameters = parameters -class TriggerResource(SubResource): +class TriggerReference(msrest.serialization.Model): + """Trigger reference type. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Trigger reference type. Possible values include: "TriggerReference". + :type type: str or ~azure.synapse.artifacts.models.TriggerReferenceType + :param reference_name: Required. Reference trigger name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Union[str, "TriggerReferenceType"], + reference_name: str, + **kwargs + ): + super(TriggerReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class TriggerResource(AzureEntityResource): """Trigger resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :ivar etag: Etag identifies change in the resource. + :ivar etag: Resource Etag. :vartype etag: str :param properties: Required. Properties of the trigger. :type properties: ~azure.synapse.artifacts.models.Trigger @@ -21962,44 +34435,194 @@ class TriggerRunsQueryResponse(msrest.serialization.Model): def __init__( self, *, - value: List["TriggerRun"], - continuation_token: Optional[str] = None, + value: List["TriggerRun"], + continuation_token: Optional[str] = None, + **kwargs + ): + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token + + +class TriggerSubscriptionOperationStatus(msrest.serialization.Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", + "Deprovisioning", "Disabled", "Unknown". + :vartype status: str or ~azure.synapse.artifacts.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour". + :type frequency: str or ~azure.synapse.artifacts.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: The end time for the time period for the trigger during which events are fired + for windows that are ready. Only UTC time is currently supported. + :type end_time: ~datetime.datetime + :param delay: Specifies how long the trigger waits past due time before triggering new run. It + doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline runs. + :type retry_policy: ~azure.synapse.artifacts.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + supported. 
+ :type depends_on: list[~azure.synapse.artifacts.models.DependencyReference] + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__( + self, + *, + pipeline: "TriggerPipelineReference", + frequency: Union[str, "TumblingWindowFrequency"], + interval: int, + start_time: datetime.datetime, + max_concurrency: int, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + end_time: Optional[datetime.datetime] = None, + delay: Optional[object] = None, + retry_policy: Optional["RetryPolicy"] = None, + depends_on: Optional[List["DependencyReference"]] = None, **kwargs ): - super(TriggerRunsQueryResponse, self).__init__(**kwargs) - self.value = value - self.continuation_token = continuation_token + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type = 'TumblingWindowTrigger' # type: str + self.pipeline = pipeline + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.delay = delay + self.max_concurrency = max_concurrency + self.retry_policy = retry_policy + self.depends_on = depends_on -class TriggerSubscriptionOperationStatus(msrest.serialization.Model): - """Defines the response of a trigger subscription operation. +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar trigger_name: Trigger name. - :vartype trigger_name: str - :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", - "Deprovisioning", "Disabled", "Unknown". - :vartype status: str or ~azure.synapse.artifacts.models.EventSubscriptionStatus + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.synapse.artifacts.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. 
+ :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. + :type size: str """ _validation = { - 'trigger_name': {'readonly': True}, - 'status': {'readonly': True}, + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { - 'trigger_name': {'key': 'triggerName', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, } def __init__( self, + *, + reference_trigger: "TriggerReference", + offset: Optional[str] = None, + size: Optional[str] = None, **kwargs ): - super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) - self.trigger_name = None - self.status = None + super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) + self.type = 'TumblingWindowTriggerDependencyReference' # type: str + self.offset = offset + self.size = size class UntilActivity(Activity): @@ -22066,7 +34689,7 @@ def __init__( **kwargs ): super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Until' + self.type = 'Until' # type: str self.expression = expression self.timeout = timeout self.activities = activities @@ -22179,7 +34802,7 @@ def __init__( **kwargs ): super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Validation' + self.type = 'Validation' # type: str self.timeout = timeout self.sleep = sleep self.minimum_size = minimum_size @@ -22279,12 +34902,69 @@ def __init__( **kwargs ): super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Vertica' + self.type = 'Vertica' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential +class VerticaSource(TabularSource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'VerticaSource' # type: str + self.query = query + + class VerticaTableDataset(Dataset): """Vertica dataset. @@ -22360,12 +35040,33 @@ def __init__( **kwargs ): super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'VerticaTable' + self.type = 'VerticaTable' # type: str self.table_name = table_name self.table = table self.schema_type_properties_schema = schema_type_properties_schema +class VirtualNetworkProfile(msrest.serialization.Model): + """Virtual Network Profile. + + :param compute_subnet_id: Subnet ID used for computes in workspace. + :type compute_subnet_id: str + """ + + _attribute_map = { + 'compute_subnet_id': {'key': 'computeSubnetId', 'type': 'str'}, + } + + def __init__( + self, + *, + compute_subnet_id: Optional[str] = None, + **kwargs + ): + super(VirtualNetworkProfile, self).__init__(**kwargs) + self.compute_subnet_id = compute_subnet_id + + class WaitActivity(Activity): """This activity suspends pipeline execution for the specified interval. 
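A hedged sketch of chaining the tumbling window trigger models added above, not part of the generated diff. The TumblingWindowTrigger, TumblingWindowTriggerDependencyReference and TriggerReference keywords match the signatures in this hunk; TriggerPipelineReference and PipelineReference are only referenced here, so their keywords (pipeline_reference, type, reference_name) are assumptions following the same reference pattern.

import datetime

from azure.synapse.artifacts.models import (
    PipelineReference,
    TriggerPipelineReference,
    TriggerReference,
    TumblingWindowTrigger,
    TumblingWindowTriggerDependencyReference,
)

# Run one hour behind an upstream tumbling window trigger.
depends_on = [
    TumblingWindowTriggerDependencyReference(
        reference_trigger=TriggerReference(
            type="TriggerReference",
            reference_name="UpstreamWindowTrigger",
        ),
        offset="01:00:00",  # must match the timespan pattern in _validation
        size="01:00:00",    # optional; defaults to the window frequency
    )
]

trigger = TumblingWindowTrigger(
    # TriggerPipelineReference/PipelineReference keywords are assumed; only
    # the types themselves are referenced in this hunk.
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(
            type="PipelineReference",
            reference_name="HourlyCopyPipeline",
        ),
    ),
    frequency="Hour",  # "Minute" or "Hour"
    interval=1,
    start_time=datetime.datetime(2020, 9, 1, tzinfo=datetime.timezone.utc),
    max_concurrency=10,  # validated to the 1..50 range
    depends_on=depends_on,
)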
@@ -22416,7 +35117,7 @@ def __init__( **kwargs ): super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type: str = 'Wait' + self.type = 'Wait' # type: str self.wait_time_in_seconds = wait_time_in_seconds @@ -22512,7 +35213,7 @@ def __init__( **kwargs ): super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type: str = 'WebActivity' + self.type = 'WebActivity' # type: str self.method = method self.url = url self.headers = headers @@ -22610,7 +35311,7 @@ def __init__( ): super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = url - self.authentication_type: Optional[str] = None + self.authentication_type = None # type: Optional[str] class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): @@ -22644,7 +35345,7 @@ def __init__( **kwargs ): super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'Anonymous' + self.authentication_type = 'Anonymous' # type: str class WebBasicAuthentication(WebLinkedServiceTypeProperties): @@ -22689,7 +35390,7 @@ def __init__( **kwargs ): super(WebBasicAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'Basic' + self.authentication_type = 'Basic' # type: str self.username = username self.password = password @@ -22735,7 +35436,7 @@ def __init__( **kwargs ): super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) - self.authentication_type: str = 'ClientCertificate' + self.authentication_type = 'ClientCertificate' # type: str self.pfx = pfx self.password = password @@ -22743,8 +35444,6 @@ def __init__( class WebHookActivity(Activity): """WebHook activity. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -22760,8 +35459,8 @@ class WebHookActivity(Activity): :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :ivar method: Required. Rest API method for target endpoint. Default value: "POST". - :vartype method: str + :param method: Required. Rest API method for target endpoint. Possible values include: "POST". + :type method: str or ~azure.synapse.artifacts.models.WebHookActivityMethod :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). 
@@ -22760,8 +35459,8 @@ class WebHookActivity(Activity):
     :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency]
     :param user_properties: Activity user properties.
     :type user_properties: list[~azure.synapse.artifacts.models.UserProperty]
-    :ivar method: Required. Rest API method for target endpoint. Default value: "POST".
-    :vartype method: str
+    :param method: Required. Rest API method for target endpoint. Possible values include: "POST".
+    :type method: str or ~azure.synapse.artifacts.models.WebHookActivityMethod
     :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression
     with resultType string).
     :type url: object
@@ -22788,7 +35487,7 @@ class WebHookActivity(Activity):
     _validation = {
         'name': {'required': True},
         'type': {'required': True},
-        'method': {'required': True, 'constant': True},
+        'method': {'required': True},
         'url': {'required': True},
     }

@@ -22808,12 +35507,11 @@ class WebHookActivity(Activity):
         'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'},
     }

-    method = "POST"
-
     def __init__(
         self,
         *,
         name: str,
+        method: Union[str, "WebHookActivityMethod"],
         url: object,
         additional_properties: Optional[Dict[str, object]] = None,
         description: Optional[str] = None,
@@ -22827,7 +35525,8 @@ def __init__(
         **kwargs
     ):
         super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
-        self.type: str = 'WebHook'
+        self.type = 'WebHook'  # type: str
+        self.method = method
         self.url = url
         self.timeout = timeout
         self.headers = headers
@@ -22885,10 +35584,56 @@ def __init__(
         **kwargs
     ):
         super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
-        self.type: str = 'Web'
+        self.type = 'Web'  # type: str
         self.type_properties = type_properties


+class WebSource(CopySource):
+    """A copy activity source for web page table.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type.Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        source_retry_count: Optional[object] = None,
+        source_retry_wait: Optional[object] = None,
+        max_concurrent_connections: Optional[object] = None,
+        **kwargs
+    ):
+        super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.type = 'WebSource'  # type: str
+
+
 class WebTableDataset(Dataset):
     """The dataset points to a HTML table in the web page.
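The `WebLinkedServiceTypeProperties` subclasses above are discriminated by `authentication_type`, which each subclass fills in on construction. A sketch using basic authentication, assuming the `SecureString` secret model from this same models module; the URL and credentials are placeholders:

```python
from azure.synapse.artifacts.models import SecureString, WebBasicAuthentication

auth = WebBasicAuthentication(
    url="https://example.com/tables",         # hypothetical target URL
    username="svc-reader",                    # placeholder account name
    password=SecureString(value="<secret>"),  # placeholder secret
)
print(auth.authentication_type)  # "Basic", set by the subclass constructor
```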
@@ -22960,90 +35705,115 @@ def __init__( **kwargs ): super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'WebTable' + self.type = 'WebTable' # type: str self.index = index self.path = path -class Workspace(Resource): - """Workspace resource type. +class Workspace(TrackedResource): + """A workspace. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: The resource name. + :ivar name: The name of the resource. :vartype name: str - :ivar type: The resource type. + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. + :param tags: A set of tags. Resource tags. :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the workspace. - :type identity: ~azure.synapse.artifacts.models.WorkspaceIdentity - :ivar provisioning_state: Workspace provisioning state, example Succeeded. + :param location: Required. The geo-location where the resource lives. + :type location: str + :param identity: Identity of the workspace. + :type identity: ~azure.synapse.artifacts.models.ManagedIdentity + :param default_data_lake_storage: Workspace default data lake storage account details. + :type default_data_lake_storage: ~azure.synapse.artifacts.models.DataLakeStorageAccountDetails + :param sql_administrator_login_password: SQL administrator login password. + :type sql_administrator_login_password: str + :param managed_resource_group_name: Workspace managed resource group. The resource group name + uniquely identifies the resource group within the user subscriptionId. The resource group name + must be no longer than 90 characters long, and must be alphanumeric characters + (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and'.'. Note that the name cannot end with '.'. + :type managed_resource_group_name: str + :ivar provisioning_state: Resource provisioning state. :vartype provisioning_state: str - :ivar create_time: Time the workspace was created in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar version: Version of the workspace. - :vartype version: str - :param default_storage: Linked service reference. - :type default_storage: ~azure.synapse.artifacts.models.LinkedServiceReference - :param default_sql_server: Linked service reference. - :type default_sql_server: ~azure.synapse.artifacts.models.LinkedServiceReference + :param sql_administrator_login: Login for workspace SQL active directory administrator. + :type sql_administrator_login: str + :param virtual_network_profile: Virtual Network profile. 
+ :type virtual_network_profile: ~azure.synapse.artifacts.models.VirtualNetworkProfile + :param connectivity_endpoints: Connectivity endpoints. + :type connectivity_endpoints: dict[str, str] + :param managed_virtual_network: Setting this to 'default' will ensure that all compute for this + workspace is in a virtual network managed on behalf of the user. + :type managed_virtual_network: str + :param private_endpoint_connections: Private endpoint connections to the workspace. + :type private_endpoint_connections: + list[~azure.synapse.artifacts.models.PrivateEndpointConnection] + :ivar extra_properties: Workspace level configs and feature flags. + :vartype extra_properties: dict[str, object] """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'location': {'required': True}, 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, + 'extra_properties': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'WorkspaceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedIdentity'}, + 'default_data_lake_storage': {'key': 'properties.defaultDataLakeStorage', 'type': 'DataLakeStorageAccountDetails'}, + 'sql_administrator_login_password': {'key': 'properties.sqlAdministratorLoginPassword', 'type': 'str'}, + 'managed_resource_group_name': {'key': 'properties.managedResourceGroupName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'default_storage': {'key': 'properties.defaultStorage', 'type': 'LinkedServiceReference'}, - 'default_sql_server': {'key': 'properties.defaultSqlServer', 'type': 'LinkedServiceReference'}, + 'sql_administrator_login': {'key': 'properties.sqlAdministratorLogin', 'type': 'str'}, + 'virtual_network_profile': {'key': 'properties.virtualNetworkProfile', 'type': 'VirtualNetworkProfile'}, + 'connectivity_endpoints': {'key': 'properties.connectivityEndpoints', 'type': '{str}'}, + 'managed_virtual_network': {'key': 'properties.managedVirtualNetwork', 'type': 'str'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'extra_properties': {'key': 'properties.extraProperties', 'type': '{object}'}, } def __init__( self, *, - location: Optional[str] = None, + location: str, tags: Optional[Dict[str, str]] = None, - additional_properties: Optional[Dict[str, object]] = None, - identity: Optional["WorkspaceIdentity"] = None, - default_storage: Optional["LinkedServiceReference"] = None, - default_sql_server: Optional["LinkedServiceReference"] = None, - **kwargs - ): - super(Workspace, self).__init__(location=location, tags=tags, **kwargs) - self.additional_properties = additional_properties + identity: Optional["ManagedIdentity"] = None, + default_data_lake_storage: Optional["DataLakeStorageAccountDetails"] = None, + sql_administrator_login_password: Optional[str] = None, + managed_resource_group_name: Optional[str] = 
None, + sql_administrator_login: Optional[str] = None, + virtual_network_profile: Optional["VirtualNetworkProfile"] = None, + connectivity_endpoints: Optional[Dict[str, str]] = None, + managed_virtual_network: Optional[str] = None, + private_endpoint_connections: Optional[List["PrivateEndpointConnection"]] = None, + **kwargs + ): + super(Workspace, self).__init__(tags=tags, location=location, **kwargs) self.identity = identity + self.default_data_lake_storage = default_data_lake_storage + self.sql_administrator_login_password = sql_administrator_login_password + self.managed_resource_group_name = managed_resource_group_name self.provisioning_state = None - self.create_time = None - self.version = None - self.default_storage = default_storage - self.default_sql_server = default_sql_server + self.sql_administrator_login = sql_administrator_login + self.virtual_network_profile = virtual_network_profile + self.connectivity_endpoints = connectivity_endpoints + self.managed_virtual_network = managed_virtual_network + self.private_endpoint_connections = private_endpoint_connections + self.extra_properties = None class WorkspaceIdentity(msrest.serialization.Model): @@ -23192,7 +35962,7 @@ def __init__( **kwargs ): super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Xero' + self.type = 'Xero' # type: str self.host = host self.consumer_key = consumer_key self.private_key = private_key @@ -23266,10 +36036,67 @@ def __init__( **kwargs ): super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'XeroObject' + self.type = 'XeroObject' # type: str self.table_name = table_name +class XeroSource(TabularSource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'XeroSource' # type: str + self.query = query + + class ZohoLinkedService(LinkedService): """Zoho server linked service. @@ -23345,7 +36172,7 @@ def __init__( **kwargs ): super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type: str = 'Zoho' + self.type = 'Zoho' # type: str self.endpoint = endpoint self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -23418,5 +36245,62 @@ def __init__( **kwargs ): super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type: str = 'ZohoObject' + self.type = 'ZohoObject' # type: str self.table_name = table_name + + +class ZohoSource(TabularSource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + query: Optional[object] = None, + **kwargs + ): + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.type = 'ZohoSource' # type: str + self.query = query diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py index b1056111cbe8..f5afded7d4f5 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/__init__.py @@ -17,6 +17,10 @@ from ._sql_script_operations import SqlScriptOperations from ._spark_job_definition_operations import SparkJobDefinitionOperations from ._notebook_operations import NotebookOperations +from ._workspace_operations import WorkspaceOperations +from ._sql_pools_operations import SqlPoolsOperations +from ._big_data_pools_operations import BigDataPoolsOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations __all__ = [ 'LinkedServiceOperations', @@ -30,4 +34,8 @@ 'SqlScriptOperations', 'SparkJobDefinitionOperations', 'NotebookOperations', + 'WorkspaceOperations', + 'SqlPoolsOperations', + 'BigDataPoolsOperations', + 'IntegrationRuntimesOperations', ] diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py new file mode 100644 index 000000000000..9c95da14db58 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_big_data_pools_operations.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BigDataPoolsOperations(object): + """BigDataPoolsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.BigDataPoolResourceInfoListResult" + """List Big Data Pools. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/bigDataPools'} # type: ignore + + def get( + self, + big_data_pool_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.BigDataPoolResourceInfo" + """Get Big Data Pool. + + :param big_data_pool_name: The Big Data Pool name. 
+ :type big_data_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BigDataPoolResourceInfo, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.BigDataPoolResourceInfo + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.BigDataPoolResourceInfo"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'bigDataPoolName': self._serialize.url("big_data_pool_name", big_data_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('BigDataPoolResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/bigDataPools/{bigDataPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py index d8e978f6ea89..7a914211996f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -53,10 +53,13 @@ def _create_data_flow_debug_session_initial( ): # type: (...) 
-> Optional["models.CreateDataFlowDebugSessionResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_data_flow_debug_session_initial.metadata['url'] # type: ignore @@ -72,13 +75,12 @@ def _create_data_flow_debug_session_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -172,14 +174,17 @@ def query_data_flow_debug_sessions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -244,10 +249,13 @@ def add_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -263,13 +271,12 @@ def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -302,10 +309,13 @@ def delete_data_flow_debug_session( :raises: 
~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete_data_flow_debug_session.metadata['url'] # type: ignore @@ -321,12 +331,12 @@ def delete_data_flow_debug_session( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -347,10 +357,13 @@ def _execute_command_initial( ): # type: (...) -> Optional["models.DataFlowDebugCommandResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -366,13 +379,12 @@ def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py index 7b6e54e01416..6e4c9941c4a8 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_data_flow_operations.py @@ -8,16 +8,18 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling 
import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,38 +46,27 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config - def create_or_update_data_flow( + def _create_or_update_data_flow_initial( self, data_flow_name, # type: str properties, # type: "models.DataFlow" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.DataFlowResource" - """Creates or updates a data flow. - - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~azure.synapse.artifacts.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + # type: (...) -> Optional["models.DataFlowResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _data_flow = models.DataFlowResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_data_flow.metadata['url'] # type: ignore + url = self._create_or_update_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -91,28 +82,96 @@ def create_or_update_data_flow( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) 
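The hunks around here split `create_or_update_data_flow` into a private `_initial` request plus a public `begin_*` method returning an `LROPoller`. A usage sketch; the `ArtifactsClient` entry point, the `data_flow` attribute name, and all resource names are assumptions not shown in this diff:

```python
from azure.identity import DefaultAzureCredential
from azure.synapse.artifacts import ArtifactsClient  # assumed client entry point
from azure.synapse.artifacts.models import MappingDataFlow

client = ArtifactsClient(
    credential=DefaultAzureCredential(),
    endpoint="https://myworkspace.dev.azuresynapse.net",  # hypothetical workspace
)

# polling defaults to False here (note kwargs.pop('polling', False) below),
# so opt in to LROBasePolling to block until the service finishes.
poller = client.data_flow.begin_create_or_update_data_flow(
    data_flow_name="MyDataFlow",                       # hypothetical name
    properties=MappingDataFlow(description="example"),
    polling=True,
)
resource = poller.result()  # DataFlowResource once the LRO completes
```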
error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('DataFlowResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _create_or_update_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + def begin_create_or_update_data_flow( + self, + data_flow_name, # type: str + properties, # type: "models.DataFlow" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.DataFlowResource"] + """Creates or updates a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param properties: Data flow properties. + :type properties: ~azure.synapse.artifacts.models.DataFlow + :param if_match: ETag of the data flow entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either DataFlowResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.DataFlowResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_data_flow_initial( + data_flow_name=data_flow_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DataFlowResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore def get_data_flow( self, @@ -134,9 +193,12 @@ def get_data_flow( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: 
ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_data_flow.metadata['url'] # type: ignore @@ -154,7 +216,7 @@ def get_data_flow( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -173,28 +235,22 @@ def get_data_flow( return deserialized get_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore - def delete_data_flow( + def _delete_data_flow_initial( self, data_flow_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a data flow. - - :param data_flow_name: The data flow name. - :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_data_flow.metadata['url'] # type: ignore + url = self._delete_data_flow_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -207,12 +263,13 @@ def delete_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -220,7 +277,62 @@ def delete_data_flow( if cls: return cls(pipeline_response, None, {}) - delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + _delete_data_flow_initial.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore + + def begin_delete_data_flow( + self, + data_flow_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a data flow. + + :param data_flow_name: The data flow name. + :type data_flow_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_data_flow_initial( + data_flow_name=data_flow_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_data_flow.metadata = {'url': '/dataflows/{dataFlowName}'} # type: ignore def get_data_flows_by_workspace( self, @@ -235,14 +347,17 @@ def get_data_flows_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py index b8ea8c5b1874..55d5e21411a8 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_dataset_operations.py @@ -8,16 +8,18 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. 
import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -57,14 +59,17 @@ def get_datasets_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -113,38 +118,27 @@ def get_next(next_link=None): ) get_datasets_by_workspace.metadata = {'url': '/datasets'} # type: ignore - def create_or_update_dataset( + def _create_or_update_dataset_initial( self, dataset_name, # type: str properties, # type: "models.Dataset" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.DatasetResource" - """Creates or updates a dataset. - - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~azure.synapse.artifacts.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + # type: (...) 
-> Optional["models.DatasetResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _dataset = models.DatasetResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_dataset.metadata['url'] # type: ignore + url = self._create_or_update_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -160,28 +154,96 @@ def create_or_update_dataset( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('DatasetResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _create_or_update_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + def begin_create_or_update_dataset( + self, + dataset_name, # type: str + properties, # type: "models.Dataset" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.DatasetResource"] + """Creates or updates a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :param properties: Dataset properties. + :type properties: ~azure.synapse.artifacts.models.Dataset + :param if_match: ETag of the dataset entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either DatasetResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.DatasetResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_dataset_initial( + dataset_name=dataset_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DatasetResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore def get_dataset( self, @@ -203,9 +265,12 @@ def get_dataset( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_dataset.metadata['url'] # type: ignore @@ -223,7 +288,7 @@ def get_dataset( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -244,28 +309,22 @@ def get_dataset( return deserialized get_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore - def delete_dataset( + def _delete_dataset_initial( self, dataset_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a dataset. - - :param dataset_name: The dataset name. 
- :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_dataset.metadata['url'] # type: ignore + url = self._delete_dataset_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -278,12 +337,13 @@ def delete_dataset( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -291,4 +351,59 @@ def delete_dataset( if cls: return cls(pipeline_response, None, {}) - delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + _delete_dataset_initial.metadata = {'url': '/datasets/{datasetName}'} # type: ignore + + def begin_delete_dataset( + self, + dataset_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a dataset. + + :param dataset_name: The dataset name. + :type dataset_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
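Deletion follows the same pattern, with the initial call now also accepting a 202 response. A short sketch of blocking on the delete, again with the hypothetical `client` and a placeholder dataset name:

```python
delete_poller = client.dataset.begin_delete_dataset(
    dataset_name="ObsoleteDataset",  # hypothetical name
    polling=True,                    # opt in to LROBasePolling; the default is False
)
delete_poller.wait()                 # blocks until the 200/202/204 flow completes
print(delete_poller.status())        # e.g. "succeeded"
```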
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_dataset_initial( + dataset_name=dataset_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_dataset.metadata = {'url': '/datasets/{datasetName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py new file mode 100644 index 000000000000..3d622cd8a2aa --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_integration_runtimes_operations.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimesOperations(object): + """IntegrationRuntimesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeListResponse" + """List Integration Runtimes. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeListResponse, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeListResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/integrationRuntimes'} # type: ignore + + def get( + self, + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeResource" + """Get Integration Runtime. + + :param integration_runtime_name: The Integration Runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/integrationRuntimes/{integrationRuntimeName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py index 81745ded6b96..9ec940e40b5a 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_linked_service_operations.py @@ -8,16 +8,18 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. 
import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -57,14 +59,17 @@ def get_linked_services_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -113,38 +118,27 @@ def get_next(next_link=None): ) get_linked_services_by_workspace.metadata = {'url': '/linkedservices'} # type: ignore - def create_or_update_linked_service( + def _create_or_update_linked_service_initial( self, linked_service_name, # type: str properties, # type: "models.LinkedService" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.LinkedServiceResource" - """Creates or updates a linked service. - - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~azure.synapse.artifacts.models.LinkedService - :param if_match: ETag of the linkedService entity. Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + # type: (...) 
-> Optional["models.LinkedServiceResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _linked_service = models.LinkedServiceResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_linked_service.metadata['url'] # type: ignore + url = self._create_or_update_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -160,28 +154,96 @@ def create_or_update_linked_service( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _create_or_update_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + def begin_create_or_update_linked_service( + self, + linked_service_name, # type: str + properties, # type: "models.LinkedService" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.LinkedServiceResource"] + """Creates or updates a linked service. + + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param properties: Properties of linked service. + :type properties: ~azure.synapse.artifacts.models.LinkedService + :param if_match: ETag of the linkedService entity. Should only be specified for update, for + which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either LinkedServiceResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.LinkedServiceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_linked_service_initial( + linked_service_name=linked_service_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedServiceResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore def get_linked_service( self, @@ -204,9 +266,12 @@ def get_linked_service( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_linked_service.metadata['url'] # type: ignore @@ -224,7 +289,7 @@ def get_linked_service( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -245,28 +310,22 @@ def get_linked_service( return deserialized get_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore - def delete_linked_service( + def _delete_linked_service_initial( self, linked_service_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a linked service. - - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_linked_service.metadata['url'] # type: ignore + url = self._delete_linked_service_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -279,12 +338,13 @@ def delete_linked_service( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -292,4 +352,59 @@ def delete_linked_service( if cls: return cls(pipeline_response, None, {}) - delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + _delete_linked_service_initial.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore + + def begin_delete_linked_service( + self, + linked_service_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a linked service. + + :param linked_service_name: The linked service name. + :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
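Because the poller supports continuation tokens, an in-flight delete can be saved and resumed later; a sketch under the same placeholder assumptions:

    poller = client.linked_service.begin_delete_linked_service("my_ls", polling=True)
    token = poller.continuation_token()  # persist this string somewhere durable
    # ...later, possibly from another process:
    resumed = client.linked_service.begin_delete_linked_service(
        "my_ls", continuation_token=token, polling=True
    )
    resumed.result()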
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_linked_service_initial( + linked_service_name=linked_service_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_linked_service.metadata = {'url': '/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py index a9bfb3cb4812..4aa1b37ac30e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_notebook_operations.py @@ -8,16 +8,18 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. 
import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -57,14 +59,17 @@ def get_notebooks_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -126,14 +131,17 @@ def get_notebook_summary_by_work_space( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -182,38 +190,27 @@ def get_next(next_link=None): ) get_notebook_summary_by_work_space.metadata = {'url': '/notebooks/summary'} # type: ignore - def create_or_update_notebook( + def _create_or_update_notebook_initial( self, notebook_name, # type: str properties, # type: "models.Notebook" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.NotebookResource" - """Creates or updates a Note Book. - - :param notebook_name: The notebook name. - :type notebook_name: str - :param properties: Properties of Notebook. - :type properties: ~azure.synapse.artifacts.models.Notebook - :param if_match: ETag of the Note book entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.NotebookResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + # type: (...) 
-> Optional["models.NotebookResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _notebook = models.NotebookResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_notebook.metadata['url'] # type: ignore + url = self._create_or_update_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -229,28 +226,96 @@ def create_or_update_notebook( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_notebook, 'NotebookResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('NotebookResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _create_or_update_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + def begin_create_or_update_notebook( + self, + notebook_name, # type: str + properties, # type: "models.Notebook" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.NotebookResource"] + """Creates or updates a Note Book. + + :param notebook_name: The notebook name. + :type notebook_name: str + :param properties: Properties of Notebook. + :type properties: ~azure.synapse.artifacts.models.Notebook + :param if_match: ETag of the Note book entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either NotebookResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.NotebookResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_notebook_initial( + notebook_name=notebook_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore def get_notebook( self, @@ -272,9 +337,12 @@ def get_notebook( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_notebook.metadata['url'] # type: ignore @@ -292,7 +360,7 @@ def get_notebook( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -313,28 +381,22 @@ def get_notebook( return deserialized get_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore - def delete_notebook( + def _delete_notebook_initial( self, notebook_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a Note book. - - :param notebook_name: The notebook name. 
- :type notebook_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_notebook.metadata['url'] # type: ignore + url = self._delete_notebook_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'notebookName': self._serialize.url("notebook_name", notebook_name, 'str'), @@ -347,12 +409,13 @@ def delete_notebook( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -360,4 +423,59 @@ def delete_notebook( if cls: return cls(pipeline_response, None, {}) - delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + _delete_notebook_initial.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore + + def begin_delete_notebook( + self, + notebook_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a Note book. + + :param notebook_name: The notebook name. + :type notebook_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
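The returned LROPoller can also be observed without blocking; a brief sketch (placeholder names):

    import time

    poller = client.notebook.begin_delete_notebook("my_notebook", polling=True)
    while not poller.done():      # non-blocking completion check
        print(poller.status())    # e.g. "InProgress"
        time.sleep(5)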
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_notebook_initial( + notebook_name=notebook_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_notebook.metadata = {'url': '/notebooks/{notebookName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py index 67076af5c3fa..f731d88c245e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_operations.py @@ -8,16 +8,18 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling from .. 
import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -57,14 +59,17 @@ def get_pipelines_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -113,36 +118,25 @@ def get_next(next_link=None): ) get_pipelines_by_workspace.metadata = {'url': '/pipelines'} # type: ignore - def create_or_update_pipeline( + def _create_or_update_pipeline_initial( self, pipeline_name, # type: str pipeline, # type: "models.PipelineResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.PipelineResource" - """Creates or updates a pipeline. - - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~azure.synapse.artifacts.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + # type: (...) 
-> Optional["models.PipelineResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_pipeline.metadata['url'] # type: ignore + url = self._create_or_update_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -158,28 +152,96 @@ def create_or_update_pipeline( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('PipelineResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _create_or_update_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + def begin_create_or_update_pipeline( + self, + pipeline_name, # type: str + pipeline, # type: "models.PipelineResource" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.PipelineResource"] + """Creates or updates a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. + :type pipeline: ~azure.synapse.artifacts.models.PipelineResource + :param if_match: ETag of the pipeline entity. Should only be specified for update, for which + it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either PipelineResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.PipelineResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_pipeline_initial( + pipeline_name=pipeline_name, + pipeline=pipeline, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('PipelineResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore def get_pipeline( self, @@ -201,9 +263,12 @@ def get_pipeline( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline.metadata['url'] # type: ignore @@ -221,7 +286,7 @@ def get_pipeline( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -242,28 +307,22 @@ def get_pipeline( return deserialized get_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore - def delete_pipeline( + def _delete_pipeline_initial( self, pipeline_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a pipeline. - - :param pipeline_name: The pipeline name. 
- :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_pipeline.metadata['url'] # type: ignore + url = self._delete_pipeline_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -276,12 +335,13 @@ def delete_pipeline( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -289,7 +349,62 @@ def delete_pipeline( if cls: return cls(pipeline_response, None, {}) - delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + _delete_pipeline_initial.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore + + def begin_delete_pipeline( + self, + pipeline_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a pipeline. + + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
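Note that polling defaults to False here, so the poller performs no polling at all; a sketch of the two behaviors (placeholder names):

    # Default: returns as soon as the service accepts the request (200/202/204).
    client.pipeline.begin_delete_pipeline("my_pipeline").result()

    # Opt in to waiting for service-side completion:
    client.pipeline.begin_delete_pipeline("my_pipeline", polling=True).result()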
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_pipeline_initial( + pipeline_name=pipeline_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_pipeline.metadata = {'url': '/pipelines/{pipelineName}'} # type: ignore def create_pipeline_run( self, @@ -323,10 +438,13 @@ def create_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_pipeline_run.metadata['url'] # type: ignore @@ -349,7 +467,7 @@ def create_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -358,11 +476,10 @@ def create_pipeline_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py index e38dcb17fb15..b4465926cd9e 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_pipeline_run_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from 
azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,10 +59,13 @@ def query_pipeline_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_pipeline_runs_by_workspace.metadata['url'] # type: ignore @@ -78,13 +81,12 @@ def query_pipeline_runs_by_workspace( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -117,9 +119,12 @@ def get_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_pipeline_run.metadata['url'] # type: ignore @@ -135,7 +140,7 @@ def get_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -176,10 +181,13 @@ def query_activity_runs( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_activity_runs.metadata['url'] # type: ignore @@ -197,13 +205,12 @@ def query_activity_runs( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -240,9 +247,12 @@ def cancel_pipeline_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.cancel_pipeline_run.metadata['url'] # type: ignore @@ -260,6 +270,7 @@ def cancel_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py index 8051a81d8afc..2140afe302d4 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_spark_job_definition_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,14 +59,17 @@ def get_spark_job_definitions_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -138,12 +141,15 @@ def create_or_update_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SparkJobDefinitionResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition = models.SparkJobDefinitionResource(properties=properties) api_version = 
"2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_spark_job_definition.metadata['url'] # type: ignore @@ -162,13 +168,12 @@ def create_or_update_spark_job_definition( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -206,9 +211,12 @@ def get_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SparkJobDefinitionResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_spark_job_definition.metadata['url'] # type: ignore @@ -226,7 +234,7 @@ def get_spark_job_definition( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -263,9 +271,12 @@ def delete_spark_job_definition( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.delete_spark_job_definition.metadata['url'] # type: ignore @@ -281,6 +292,7 @@ def delete_spark_job_definition( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -303,9 +315,12 @@ def _execute_spark_job_definition_initial( ): # type: (...) 
-> "models.SparkBatchJob" cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._execute_spark_job_definition_initial.metadata['url'] # type: ignore @@ -321,7 +336,7 @@ def _execute_spark_job_definition_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -409,12 +424,15 @@ def _debug_spark_job_definition_initial( ): # type: (...) -> "models.SparkBatchJob" cls = kwargs.pop('cls', None) # type: ClsType["models.SparkBatchJob"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _spark_job_definition_azure_resource = models.SparkJobDefinitionResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._debug_spark_job_definition_initial.metadata['url'] # type: ignore @@ -430,13 +448,12 @@ def _debug_spark_job_definition_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_spark_job_definition_azure_resource, 'SparkJobDefinitionResource') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py new file mode 100644 index 000000000000..0f5b2d5293ae --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_pools_operations.py @@ -0,0 +1,152 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SqlPoolsOperations(object): + """SqlPoolsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.SqlPoolInfoListResult" + """List Sql Pools. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPoolInfoListResult, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPoolInfoListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPoolInfoListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPoolInfoListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/sqlPools'} # type: ignore + + def get( + self, + sql_pool_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SqlPool" + """Get Sql Pool. 
+ + :param sql_pool_name: The Sql Pool name. + :type sql_pool_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SqlPool, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.SqlPool + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SqlPool"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('SqlPool', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/sqlPools/{sqlPoolName}'} # type: ignore diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py index 78a44563ddd3..9a21204f45da 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_sql_script_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -57,14 +57,17 @@ def get_sql_scripts_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptsListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", 
accept, 'str') if not next_link: # Construct URL @@ -136,12 +139,15 @@ def create_or_update_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SqlScriptResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _sql_script = models.SqlScriptResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update_sql_script.metadata['url'] # type: ignore @@ -160,13 +166,12 @@ def create_or_update_sql_script( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_sql_script, 'SqlScriptResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -203,9 +208,12 @@ def get_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SqlScriptResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_sql_script.metadata['url'] # type: ignore @@ -223,7 +231,7 @@ def get_sql_script( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -260,9 +268,12 @@ def delete_sql_script( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.delete_sql_script.metadata['url'] # type: ignore @@ -278,6 +289,7 @@ def delete_sql_script( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py 
b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py index b403f524caa4..5d6ef2bf8c6f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -59,14 +59,17 @@ def get_triggers_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -115,38 +118,27 @@ def get_next(next_link=None): ) get_triggers_by_workspace.metadata = {'url': '/triggers'} # type: ignore - def create_or_update_trigger( + def _create_or_update_trigger_initial( self, trigger_name, # type: str properties, # type: "models.Trigger" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "models.TriggerResource" - """Creates or updates a trigger. - - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~azure.synapse.artifacts.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~azure.synapse.artifacts.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + # type: (...) 
-> Optional["models.TriggerResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) _trigger = models.TriggerResource(properties=properties) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.create_or_update_trigger.metadata['url'] # type: ignore + url = self._create_or_update_trigger_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -162,28 +154,96 @@ def create_or_update_trigger( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(_trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) - deserialized = self._deserialize('TriggerResource', pipeline_response) + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + _create_or_update_trigger_initial.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + + def begin_create_or_update_trigger( + self, + trigger_name, # type: str + properties, # type: "models.Trigger" + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.TriggerResource"] + """Creates or updates a trigger. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param properties: Properties of the trigger. + :type properties: ~azure.synapse.artifacts.models.Trigger + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either TriggerResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.synapse.artifacts.models.TriggerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_trigger_initial( + trigger_name=trigger_name, + properties=properties, + if_match=if_match, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore def get_trigger( self, @@ -205,9 +265,12 @@ def get_trigger( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_trigger.metadata['url'] # type: ignore @@ -225,7 +288,7 @@ def get_trigger( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -246,28 +309,22 @@ def get_trigger( return deserialized get_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore - def delete_trigger( + def _delete_trigger_initial( self, trigger_name, # type: str **kwargs # type: Any ): # type: (...) -> None - """Deletes a trigger. - - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL - url = self.delete_trigger.metadata['url'] # type: ignore + url = self._delete_trigger_initial.metadata['url'] # type: ignore path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), @@ -280,12 +337,13 @@ def delete_trigger( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.CloudError, response) raise HttpResponseError(response=response, model=error) @@ -293,7 +351,62 @@ def delete_trigger( if cls: return cls(pipeline_response, None, {}) - delete_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + _delete_trigger_initial.metadata = {'url': '/triggers/{triggerName}'} # type: ignore + + def begin_delete_trigger( + self, + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a trigger. + + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_trigger_initial( + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = LROBasePolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete_trigger.metadata = {'url': '/triggers/{triggerName}'} # type: ignore def _subscribe_trigger_to_events_initial( self, @@ -302,9 +415,12 @@ def _subscribe_trigger_to_events_initial( ): # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._subscribe_trigger_to_events_initial.metadata['url'] # type: ignore @@ -320,7 +436,7 @@ def _subscribe_trigger_to_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -415,9 +531,12 @@ def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -433,7 +552,7 @@ def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -459,9 +578,12 @@ def _unsubscribe_trigger_from_events_initial( ): # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._unsubscribe_trigger_from_events_initial.metadata['url'] # type: ignore @@ -477,7 +599,7 @@ def _unsubscribe_trigger_from_events_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -563,9 +685,12 @@ def _start_trigger_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._start_trigger_initial.metadata['url'] # type: ignore @@ -581,6 +706,7 @@ def _start_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -658,9 +784,12 @@ def _stop_trigger_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self._stop_trigger_initial.metadata['url'] # type: ignore @@ -676,6 +805,7 @@ def _stop_trigger_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py index 5d34234f1b10..ba4a1dda25fd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_trigger_run_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -62,9 +62,12 @@ def rerun_trigger_instance( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" + accept = "application/json" # Construct URL url = self.rerun_trigger_instance.metadata['url'] # type: ignore @@ -81,6 +84,7 @@ def rerun_trigger_instance( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -96,6 +100,63 @@ def rerun_trigger_instance( rerun_trigger_instance.metadata = {'url': '/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore + def cancel_trigger_instance( + self, + trigger_name, # type: str + run_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Cancel single trigger instance by runId. + + :param trigger_name: The trigger name. + :type trigger_name: str + :param run_id: The pipeline run identifier. 
+ :type run_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.cancel_trigger_instance.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'runId': self._serialize.url("run_id", run_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.CloudError, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) + + cancel_trigger_instance.metadata = {'url': '/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore + def query_trigger_runs_by_workspace( self, filter_parameters, # type: "models.RunFilterParameters" @@ -112,10 +173,13 @@ def query_trigger_runs_by_workspace( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01-preview" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_trigger_runs_by_workspace.metadata['url'] # type: ignore @@ -131,13 +195,12 @@ def query_trigger_runs_by_workspace( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py 
b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py new file mode 100644 index 000000000000..6c361602a712 --- /dev/null +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/operations/_workspace_operations.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceOperations(object): + """WorkspaceOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.synapse.artifacts.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + **kwargs # type: Any + ): + # type: (...) -> "models.Workspace" + """Get Workspace. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure.synapse.artifacts.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2019-06-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorContract, response) + raise HttpResponseError(response=response, model=error) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/workspace'} # type: ignore diff --git a/sdk/synapse/azure-synapse-spark/CHANGELOG.md b/sdk/synapse/azure-synapse-spark/CHANGELOG.md index 9fc398f95895..bb80dbe7182f 100644 --- a/sdk/synapse/azure-synapse-spark/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-spark/CHANGELOG.md @@ -1,5 +1,9 @@ # Release History +## 0.3.0 (2020-09-15) + +* Internal bugfixes (re-generated with latest generator) + ## 0.2.0 (2020-07-01) * Initial Release diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py index 225215d0675c..5b37860b9ba0 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/__init__.py @@ -13,7 +13,7 @@ __all__ = ['SparkClient'] try: - from ._patch import patch_sdk + from ._patch import patch_sdk # type: ignore patch_sdk() except ImportError: pass diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py index c846fd6d4cc4..574b3dcae0f8 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_configuration.py @@ -59,8 +59,7 @@ def __init__( self.endpoint = endpoint self.spark_pool_name = spark_pool_name self.livy_api_version = livy_api_version - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -73,6 +72,7 @@ def _configure( self.headers_policy = 
kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_metadata.json b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_metadata.json deleted file mode 100644 index 8eff44cc11b7..000000000000 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_metadata.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "chosen_version": "2019-11-01-preview", - "total_api_version_list": ["2019-11-01-preview"], - "client": { - "name": "SparkClient", - "filename": "_spark_client", - "description": "SparkClient." - }, - "global_parameters": { - "sync_method": { - "credential": { - "method_signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - }, - "spark_pool_name": { - "method_signature": "spark_pool_name, # type: str", - "description": "Name of the spark pool.", - "docstring_type": "str", - "required": true - }, - "livy_api_version": { - "method_signature": "livy_api_version=\"2019-11-01-preview\", # type: str", - "description": "Valid api-version for the request.", - "docstring_type": "str", - "required": true - } - }, - "async_method": { - "credential": { - "method_signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "endpoint": { - "method_signature": "endpoint, # type: str", - "description": "The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.", - "docstring_type": "str", - "required": true - }, - "spark_pool_name": { - "method_signature": "spark_pool_name, # type: str", - "description": "Name of the spark pool.", - "docstring_type": "str", - "required": true - }, - "livy_api_version": { - "method_signature": "livy_api_version=\"2019-11-01-preview\", # type: str", - "description": "Valid api-version for the request.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, endpoint, spark_pool_name, livy_api_version" - }, - "config": { - "credential": true, - "credential_scopes": ["https://dev.azuresynapse.net/.default"] - }, - "operation_groups": { - "spark_batch": "SparkBatchOperations", - "spark_session": "SparkSessionOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" -} \ No newline at end of file diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py index 7b3664b1ce60..a3dfa46c408d 100644 --- 
a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_spark_client.py @@ -38,7 +38,6 @@ class SparkClient(object): :type spark_pool_name: str :param livy_api_version: Valid api-version for the request. :type livy_api_version: str - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( @@ -56,6 +55,7 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.spark_batch = SparkBatchOperations( diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py index 035146e99a22..92721eef7dd5 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "0.2.0" +VERSION = "0.3.0" diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py index a4411e9376f3..04ee8cc75d0e 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._spark_client_async import SparkClient +from ._spark_client import SparkClient __all__ = ['SparkClient'] diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py similarity index 94% rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration_async.py rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py index 25bc033d8161..3d748483e56f 100644 --- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration_async.py +++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_configuration.py @@ -56,8 +56,7 @@ def __init__( self.endpoint = endpoint self.spark_pool_name = spark_pool_name self.livy_api_version = livy_api_version - self.credential_scopes = ['https://dev.azuresynapse.net/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://dev.azuresynapse.net/.default']) kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION)) self._configure(**kwargs) @@ -69,6 +68,7 @@ def _configure( self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) 
         self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py
similarity index 84%
rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client_async.py
rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py
index 7ba873de289f..94b34fd756b0 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client_async.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/_spark_client.py
@@ -15,9 +15,9 @@
     # pylint: disable=unused-import,ungrouped-imports
     from azure.core.credentials_async import AsyncTokenCredential

-from ._configuration_async import SparkClientConfiguration
-from .operations_async import SparkBatchOperations
-from .operations_async import SparkSessionOperations
+from ._configuration import SparkClientConfiguration
+from .operations import SparkBatchOperations
+from .operations import SparkSessionOperations
 from .. import models

@@ -25,9 +25,9 @@ class SparkClient(object):
     """SparkClient.

     :ivar spark_batch: SparkBatchOperations operations
-    :vartype spark_batch: azure.synapse.spark.aio.operations_async.SparkBatchOperations
+    :vartype spark_batch: azure.synapse.spark.aio.operations.SparkBatchOperations
     :ivar spark_session: SparkSessionOperations operations
-    :vartype spark_session: azure.synapse.spark.aio.operations_async.SparkSessionOperations
+    :vartype spark_session: azure.synapse.spark.aio.operations.SparkSessionOperations
     :param credential: Credential needed for the client to connect to Azure.
     :type credential: ~azure.core.credentials_async.AsyncTokenCredential
     :param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
@@ -36,7 +36,6 @@ class SparkClient(object):
     :type spark_pool_name: str
     :param livy_api_version: Valid api-version for the request.
     :type livy_api_version: str
-    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
     """

     def __init__(
@@ -53,6 +52,7 @@ def __init__(
         client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
         self._serialize = Serializer(client_models)
+        self._serialize.client_side_validation = False
         self._deserialize = Deserializer(client_models)

         self.spark_batch = SparkBatchOperations(
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/__init__.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/__init__.py
similarity index 80%
rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/__init__.py
rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/__init__.py
index 57c274c55c7b..cfceeb39e559 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/__init__.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/__init__.py
@@ -6,8 +6,8 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------

-from ._spark_batch_operations_async import SparkBatchOperations
-from ._spark_session_operations_async import SparkSessionOperations
+from ._spark_batch_operations import SparkBatchOperations
+from ._spark_session_operations import SparkSessionOperations

 __all__ = [
     'SparkBatchOperations',
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py
similarity index 92%
rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py
rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py
index 0fb70259d33a..6274c73329d1 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_batch_operations.py
@@ -8,7 +8,7 @@
 from typing import Any, Callable, Dict, Generic, Optional, TypeVar
 import warnings

-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
 from azure.core.pipeline import PipelineResponse
 from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest

@@ -62,8 +62,11 @@ async def get_spark_batch_jobs(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkBatchJobCollection"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_batch_jobs.metadata['url']  # type: ignore
@@ -85,7 +88,7 @@ async def get_spark_batch_jobs(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
@@ -122,9 +125,12 @@ async def create_spark_batch_job(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkBatchJob"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"

         # Construct URL
         url = self.create_spark_batch_job.metadata['url']  # type: ignore
@@ -143,13 +149,12 @@ async def create_spark_batch_job(
         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
         header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         body_content_kwargs = {}  # type: Dict[str, Any]
         body_content = self._serialize.body(spark_batch_job_options, 'SparkBatchJobOptions')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response
@@ -184,8 +189,11 @@ async def get_spark_batch_job(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkBatchJob"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_batch_job.metadata['url']  # type: ignore
@@ -204,7 +212,7 @@ async def get_spark_batch_job(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
@@ -237,7 +245,9 @@ async def cancel_spark_batch_job(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))

         # Construct URL
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py
similarity index 91%
rename from sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py
rename to sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py
index 624bc2ae298f..05326e8d0005 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/aio/operations/_spark_session_operations.py
@@ -8,7 +8,7 @@
 from typing import Any, Callable, Dict, Generic, Optional, TypeVar
 import warnings

-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
 from azure.core.pipeline import PipelineResponse
 from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest

@@ -62,8 +62,11 @@ async def get_spark_sessions(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkSessionCollection"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_sessions.metadata['url']  # type: ignore
@@ -85,7 +88,7 @@ async def get_spark_sessions(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
@@ -122,9 +125,12 @@ async def create_spark_session(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkSession"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"

         # Construct URL
         url = self.create_spark_session.metadata['url']  # type: ignore
@@ -143,13 +149,12 @@ async def create_spark_session(
         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
         header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         body_content_kwargs = {}  # type: Dict[str, Any]
         body_content = self._serialize.body(spark_session_options, 'SparkSessionOptions')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response
@@ -184,8 +189,11 @@ async def get_spark_session(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkSession"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_session.metadata['url']  # type: ignore
@@ -204,7 +212,7 @@ async def get_spark_session(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
@@ -237,7 +245,9 @@ async def cancel_spark_session(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))

         # Construct URL
@@ -284,7 +294,9 @@ async def reset_spark_session_timeout(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))

         # Construct URL
@@ -331,8 +343,11 @@ async def get_spark_statements(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatementCollection"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_statements.metadata['url']  # type: ignore
@@ -349,7 +364,7 @@ async def get_spark_statements(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
@@ -385,9 +400,12 @@ async def create_spark_statement(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatement"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"

         # Construct URL
         url = self.create_spark_statement.metadata['url']  # type: ignore
@@ -405,13 +423,12 @@ async def create_spark_statement(
         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
         header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         body_content_kwargs = {}  # type: Dict[str, Any]
         body_content = self._serialize.body(spark_statement_options, 'SparkStatementOptions')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response
@@ -445,8 +462,11 @@ async def get_spark_statement(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatement"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_statement.metadata['url']  # type: ignore
@@ -464,7 +484,7 @@ async def get_spark_statement(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
@@ -500,8 +520,11 @@ async def cancel_spark_statement(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatementCancellationResult"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.cancel_spark_statement.metadata['url']  # type: ignore
@@ -519,7 +542,7 @@ async def cancel_spark_statement(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.post(url, query_parameters, header_parameters)
         pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py
index bf5029789c38..609f0121140b 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models.py
@@ -474,7 +474,7 @@ class SparkSession(msrest.serialization.Model):
     :type scheduler: ~azure.synapse.spark.models.SparkScheduler
     :param plugin:
     :type plugin: ~azure.synapse.spark.models.SparkServicePlugin
-    :param errors:
+    :param errors: The error information.
     :type errors: list[~azure.synapse.spark.models.SparkServiceError]
     :param tags: A set of tags. Dictionary of :code:``.
     :type tags: dict[str, str]
@@ -672,7 +672,7 @@ class SparkSessionState(msrest.serialization.Model):
     :type dead_at: ~datetime.datetime
     :param shutting_down_at:
     :type shutting_down_at: ~datetime.datetime
-    :param terminated_at:
+    :param terminated_at: The time at which the "killed" Livy state was first seen.
     :type terminated_at: ~datetime.datetime
     :param recovering_at:
     :type recovering_at: ~datetime.datetime
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py
index 4e91597b1ee0..a6bbaea80bb6 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_models_py3.py
@@ -561,7 +561,7 @@ class SparkSession(msrest.serialization.Model):
     :type scheduler: ~azure.synapse.spark.models.SparkScheduler
     :param plugin:
     :type plugin: ~azure.synapse.spark.models.SparkServicePlugin
-    :param errors:
+    :param errors: The error information.
     :type errors: list[~azure.synapse.spark.models.SparkServiceError]
     :param tags: A set of tags. Dictionary of :code:``.
     :type tags: dict[str, str]
@@ -799,7 +799,7 @@ class SparkSessionState(msrest.serialization.Model):
     :type dead_at: ~datetime.datetime
     :param shutting_down_at:
     :type shutting_down_at: ~datetime.datetime
-    :param terminated_at:
+    :param terminated_at: The time at which the "killed" Livy state was first seen.
     :type terminated_at: ~datetime.datetime
     :param recovering_at:
     :type recovering_at: ~datetime.datetime
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py
index 209188ebcea3..19d776f2657c 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/models/_spark_client_enums.py
@@ -6,57 +6,75 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------

-from enum import Enum
+from enum import Enum, EnumMeta
+from six import with_metaclass

-class PluginCurrentState(str, Enum):
+class _CaseInsensitiveEnumMeta(EnumMeta):
+    def __getitem__(self, name):
+        return super().__getitem__(name.upper())

-    preparation = "Preparation"
-    resource_acquisition = "ResourceAcquisition"
-    queued = "Queued"
-    submission = "Submission"
-    monitoring = "Monitoring"
-    cleanup = "Cleanup"
-    ended = "Ended"
+    def __getattr__(cls, name):
+        """Return the enum member matching `name`
+        We use __getattr__ instead of descriptors or inserting into the enum
+        class' __dict__ in order to support `name` and `value` being both
+        properties for enum members (which live in the class' __dict__) and
+        enum members themselves.
+        """
+        try:
+            return cls._member_map_[name.upper()]
+        except KeyError:
+            raise AttributeError(name)

-class SchedulerCurrentState(str, Enum):

-    queued = "Queued"
-    scheduled = "Scheduled"
-    ended = "Ended"
+class PluginCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):

-class SparkBatchJobResultType(str, Enum):
+    PREPARATION = "Preparation"
+    RESOURCE_ACQUISITION = "ResourceAcquisition"
+    QUEUED = "Queued"
+    SUBMISSION = "Submission"
+    MONITORING = "Monitoring"
+    CLEANUP = "Cleanup"
+    ENDED = "Ended"
+
+class SchedulerCurrentState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+    QUEUED = "Queued"
+    SCHEDULED = "Scheduled"
+    ENDED = "Ended"
+
+class SparkBatchJobResultType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
     """The Spark batch job result.
     """

-    uncertain = "Uncertain"
-    succeeded = "Succeeded"
-    failed = "Failed"
-    cancelled = "Cancelled"
+    UNCERTAIN = "Uncertain"
+    SUCCEEDED = "Succeeded"
+    FAILED = "Failed"
+    CANCELLED = "Cancelled"

-class SparkErrorSource(str, Enum):
+class SparkErrorSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):

-    system = "System"
-    user = "User"
-    unknown = "Unknown"
-    dependency = "Dependency"
+    SYSTEM = "System"
+    USER = "User"
+    UNKNOWN = "Unknown"
+    DEPENDENCY = "Dependency"

-class SparkJobType(str, Enum):
+class SparkJobType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
     """The job type.
     """

-    spark_batch = "SparkBatch"
-    spark_session = "SparkSession"
+    SPARK_BATCH = "SparkBatch"
+    SPARK_SESSION = "SparkSession"

-class SparkSessionResultType(str, Enum):
+class SparkSessionResultType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):

-    uncertain = "Uncertain"
-    succeeded = "Succeeded"
-    failed = "Failed"
-    cancelled = "Cancelled"
+    UNCERTAIN = "Uncertain"
+    SUCCEEDED = "Succeeded"
+    FAILED = "Failed"
+    CANCELLED = "Cancelled"

-class SparkStatementLanguageType(str, Enum):
+class SparkStatementLanguageType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):

-    spark = "spark"
-    pyspark = "pyspark"
-    dotnetspark = "dotnetspark"
-    sql = "sql"
+    SPARK = "spark"
+    PYSPARK = "pyspark"
+    DOTNETSPARK = "dotnetspark"
+    SQL = "sql"
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py
index e1ecb3cfdc0a..fa0fd902866a 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_batch_operations.py
@@ -8,7 +8,7 @@
 from typing import TYPE_CHECKING
 import warnings

-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
 from azure.core.pipeline import PipelineResponse
 from azure.core.pipeline.transport import HttpRequest, HttpResponse

@@ -67,8 +67,11 @@ def get_spark_batch_jobs(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkBatchJobCollection"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_batch_jobs.metadata['url']  # type: ignore
@@ -90,7 +93,7 @@ def get_spark_batch_jobs(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
@@ -128,9 +131,12 @@ def create_spark_batch_job(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkBatchJob"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"

         # Construct URL
         url = self.create_spark_batch_job.metadata['url']  # type: ignore
@@ -149,13 +155,12 @@ def create_spark_batch_job(
         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
         header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         body_content_kwargs = {}  # type: Dict[str, Any]
         body_content = self._serialize.body(spark_batch_job_options, 'SparkBatchJobOptions')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response
@@ -191,8 +196,11 @@ def get_spark_batch_job(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkBatchJob"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_batch_job.metadata['url']  # type: ignore
@@ -211,7 +219,7 @@ def get_spark_batch_job(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
@@ -245,7 +253,9 @@ def cancel_spark_batch_job(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))

         # Construct URL
diff --git a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py
index 9304a83d0430..a28ae36c3e79 100644
--- a/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py
+++ b/sdk/synapse/azure-synapse-spark/azure/synapse/spark/operations/_spark_session_operations.py
@@ -8,7 +8,7 @@
 from typing import TYPE_CHECKING
 import warnings

-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
 from azure.core.pipeline import PipelineResponse
 from azure.core.pipeline.transport import HttpRequest, HttpResponse

@@ -67,8 +67,11 @@ def get_spark_sessions(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkSessionCollection"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_sessions.metadata['url']  # type: ignore
@@ -90,7 +93,7 @@ def get_spark_sessions(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
@@ -128,9 +131,12 @@ def create_spark_session(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkSession"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"

         # Construct URL
         url = self.create_spark_session.metadata['url']  # type: ignore
@@ -149,13 +155,12 @@ def create_spark_session(
         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
         header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         body_content_kwargs = {}  # type: Dict[str, Any]
         body_content = self._serialize.body(spark_session_options, 'SparkSessionOptions')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response
@@ -191,8 +196,11 @@ def get_spark_session(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkSession"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_session.metadata['url']  # type: ignore
@@ -211,7 +219,7 @@ def get_spark_session(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
@@ -245,7 +253,9 @@ def cancel_spark_session(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))

         # Construct URL
@@ -293,7 +303,9 @@ def reset_spark_session_timeout(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))

         # Construct URL
@@ -341,8 +353,11 @@ def get_spark_statements(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatementCollection"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_statements.metadata['url']  # type: ignore
@@ -359,7 +374,7 @@ def get_spark_statements(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
@@ -396,9 +411,12 @@ def create_spark_statement(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatement"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
         content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"

         # Construct URL
         url = self.create_spark_statement.metadata['url']  # type: ignore
@@ -416,13 +434,12 @@ def create_spark_statement(
         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
         header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         body_content_kwargs = {}  # type: Dict[str, Any]
         body_content = self._serialize.body(spark_statement_options, 'SparkStatementOptions')
         body_content_kwargs['content'] = body_content
         request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response
@@ -457,8 +474,11 @@ def get_spark_statement(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatement"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.get_spark_statement.metadata['url']  # type: ignore
@@ -476,7 +496,7 @@ def get_spark_statement(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.get(url, query_parameters, header_parameters)
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
@@ -513,8 +533,11 @@ def cancel_spark_statement(
         :raises: ~azure.core.exceptions.HttpResponseError
         """
         cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkStatementCancellationResult"]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
         error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"

         # Construct URL
         url = self.cancel_spark_statement.metadata['url']  # type: ignore
@@ -532,7 +555,7 @@ def cancel_spark_statement(

         # Construct headers
         header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = 'application/json'
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

         request = self._client.post(url, query_parameters, header_parameters)
         pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
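A few of the recurring changes above are easier to evaluate with small, self-contained sketches. First, the async surface is renamed: `operations_async` becomes `operations` and `_spark_client_async.py` becomes `_spark_client.py`, mirroring the sync layout, so code importing from the old paths needs a one-line change. Below is a minimal sketch of async usage against the renamed modules; the workspace endpoint, pool name, and Livy API version are placeholders, and the assumptions that `SparkClient` is re-exported from `azure.synapse.spark.aio` and that `livy_api_version` carries a usable default should be checked against the installed package.

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.synapse.spark.aio import SparkClient  # operations now live in ...aio.operations

async def main():
    credential = DefaultAzureCredential()
    client = SparkClient(
        credential=credential,
        endpoint="https://myworkspace.dev.azuresynapse.net",  # placeholder workspace
        spark_pool_name="mysparkpool",                        # placeholder Spark pool
        livy_api_version="2019-11-01-preview",                # assumed; verify for your SDK build
    )
    # SparkBatchOperations / SparkSessionOperations import paths changed from
    # azure.synapse.spark.aio.operations_async to azure.synapse.spark.aio.operations.
    jobs = await client.spark_batch.get_spark_batch_jobs()
    print(jobs)
    await credential.close()

asyncio.run(main())
```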
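Second, each client constructor now sets `self._serialize.client_side_validation = False`, so msrest no longer enforces model constraints (max_length, patterns, required fields) before a request is sent; the service response becomes the single source of validation errors. A sketch of what that flag changes, using a hypothetical msrest model (`Demo` is not part of this SDK):

```python
from msrest.exceptions import ValidationError
from msrest.serialization import Model, Serializer

class Demo(Model):
    """Hypothetical model carrying a client-side constraint."""
    _validation = {'name': {'max_length': 5}}
    _attribute_map = {'name': {'key': 'name', 'type': 'str'}}

    def __init__(self, **kwargs):
        super(Demo, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)

serializer = Serializer({'Demo': Demo})

try:
    serializer.body(Demo(name="definitely-too-long"), 'Demo')
except ValidationError:
    print("rejected locally: client_side_validation defaults to True")

serializer.client_side_validation = False  # what the regenerated clients now do
body = serializer.body(Demo(name="definitely-too-long"), 'Demo')
print(body)  # the over-long value goes on the wire; the service gets the final say
```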
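Third, the enum rewrite does two things at once: member names are upper-cased (`spark_batch` becomes `SPARK_BATCH`) and every enum gains `_CaseInsensitiveEnumMeta`, which makes both attribute access and `[]` lookup case-insensitive, so code written against the 0.2.0 lowercase names keeps resolving. Member values are untouched, so serialized payloads do not change. A Python-3-only reduction of the same pattern (the shipped code wraps it in `six.with_metaclass` for Python 2 compatibility):

```python
from enum import Enum, EnumMeta

class _CaseInsensitiveEnumMeta(EnumMeta):
    def __getitem__(cls, name):
        # SparkJobType["spark_batch"] -> SparkJobType.SPARK_BATCH
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        # Reached only when normal class attribute lookup fails, i.e. for
        # non-canonical casings such as SparkJobType.spark_batch.
        try:
            return cls._member_map_[name.upper()]
        except KeyError:
            raise AttributeError(name)

class SparkJobType(str, Enum, metaclass=_CaseInsensitiveEnumMeta):
    SPARK_BATCH = "SparkBatch"
    SPARK_SESSION = "SparkSession"

assert SparkJobType.spark_batch is SparkJobType.SPARK_BATCH        # 0.2.0 spelling still works
assert SparkJobType["spark_session"] is SparkJobType.SPARK_SESSION
assert SparkJobType.SPARK_BATCH == "SparkBatch"                    # value, and wire format, unchanged
```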
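Finally, every operation's default `error_map` grows a `401: ClientAuthenticationError` entry alongside the existing 404/409 mappings, and the pattern `error_map.update(kwargs.pop('error_map', {}))` still lets callers overlay their own exception types per call. In practice a rejected token now raises `ClientAuthenticationError` rather than a bare `HttpResponseError`. A sketch of both behaviors; the client construction mirrors the async sketch above, `batch_id` is assumed to be the parameter name, and `BatchJobMissingError` is an application-defined type, not part of the SDK:

```python
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceNotFoundError,
)
from azure.identity import DefaultAzureCredential
from azure.synapse.spark import SparkClient

class BatchJobMissingError(ResourceNotFoundError):
    """Hypothetical application-specific 404."""

client = SparkClient(
    credential=DefaultAzureCredential(),
    endpoint="https://myworkspace.dev.azuresynapse.net",  # placeholder workspace
    spark_pool_name="mysparkpool",                        # placeholder Spark pool
    livy_api_version="2019-11-01-preview",                # assumed; verify for your SDK build
)

try:
    # The per-call map is merged over the defaults, so only 404 is overridden;
    # 401 still surfaces as ClientAuthenticationError.
    job = client.spark_batch.get_spark_batch_job(
        batch_id=42,                            # illustrative job id
        error_map={404: BatchJobMissingError},
    )
except BatchJobMissingError:
    print("no such batch job")
except ClientAuthenticationError:
    print("credential rejected (401): the new default mapping")
except HttpResponseError as exc:
    print("other service error:", exc.status_code)
```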