Align recent changes with Swagger #7016

Merged · 4 commits · Jul 26, 2019
14 changes: 14 additions & 0 deletions eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
@@ -0,0 +1,14 @@
Installing AutoRest version: latest
AutoRest installed successfully.
Commencing code generation
Generating CSharp code
Executing AutoRest command
cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Repos\azure-sdk-for-net\sdk
2019-07-26 10:54:28 UTC
Azure-rest-api-specs repository information
GitHub fork: Azure
Branch: master
Commit: 7936730ee4e746dd7af1a0a97b20ba4779a2a35c
AutoRest information
Requested version: latest
Bootstrapper version: autorest@2.0.4283
@@ -0,0 +1,98 @@
// <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>

namespace Microsoft.Azure.Management.DataFactory.Models
{
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;

/// <summary>
/// Azure Data Explorer command activity.
/// </summary>
[Newtonsoft.Json.JsonObject("AzureDataExplorerCommand")]
[Rest.Serialization.JsonTransformation]
public partial class AzureDataExplorerCommandActivity : ExecutionActivity
{
/// <summary>
/// Initializes a new instance of the AzureDataExplorerCommandActivity
/// class.
/// </summary>
public AzureDataExplorerCommandActivity()
{
CustomInit();
}

/// <summary>
/// Initializes a new instance of the AzureDataExplorerCommandActivity
/// class.
/// </summary>
/// <param name="name">Activity name.</param>
/// <param name="command">A control command, according to the Azure
/// Data Explorer command syntax. Type: string (or Expression with
/// resultType string).</param>
/// <param name="additionalProperties">Unmatched properties from the
/// message are deserialized to this collection</param>
/// <param name="description">Activity description.</param>
/// <param name="dependsOn">Activity depends on condition.</param>
/// <param name="userProperties">Activity user properties.</param>
/// <param name="linkedServiceName">Linked service reference.</param>
/// <param name="policy">Activity policy.</param>
/// <param name="commandTimeout">Control command timeout. Type: string
/// (or Expression with resultType string), pattern:
/// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).</param>
public AzureDataExplorerCommandActivity(string name, object command, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string description = default(string), IList<ActivityDependency> dependsOn = default(IList<ActivityDependency>), IList<UserProperty> userProperties = default(IList<UserProperty>), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), object commandTimeout = default(object))
: base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy)
{
Command = command;
CommandTimeout = commandTimeout;
CustomInit();
}

/// <summary>
/// An initialization method that performs custom operations like setting defaults
/// </summary>
partial void CustomInit();

/// <summary>
/// Gets or sets a control command, according to the Azure Data
/// Explorer command syntax. Type: string (or Expression with
/// resultType string).
/// </summary>
[JsonProperty(PropertyName = "typeProperties.command")]
public object Command { get; set; }

/// <summary>
/// Gets or sets control command timeout. Type: string (or Expression
/// with resultType string), pattern:
/// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
/// </summary>
[JsonProperty(PropertyName = "typeProperties.commandTimeout")]
public object CommandTimeout { get; set; }

/// <summary>
/// Validate the object.
/// </summary>
/// <exception cref="ValidationException">
/// Thrown if validation fails
/// </exception>
public override void Validate()
{
base.Validate();
if (Command == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "Command");
}
}
}
}
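
For orientation, here is a minimal usage sketch, not part of this PR, showing how the newly generated activity could be wired into a pipeline definition. The linked service name, Kusto command, timeout value, and pipeline wiring are illustrative assumptions rather than values taken from the diff.

using System.Collections.Generic;
using Microsoft.Azure.Management.DataFactory.Models;

// Construct the activity against a hypothetical Azure Data Explorer linked service.
var adxCommand = new AzureDataExplorerCommandActivity(
    name: "RunControlCommand",
    command: ".show tables",           // any Kusto control command, or an Expression
    commandTimeout: "00:20:00")        // matches the documented timespan pattern
{
    LinkedServiceName = new LinkedServiceReference("AzureDataExplorerLinkedService")
};

adxCommand.Validate();                 // throws ValidationException if Command is null

// The activity would then be attached to a pipeline and pushed through the
// management client (for example via client.Pipelines.CreateOrUpdate).
var pipeline = new PipelineResource
{
    Activities = new List<Activity> { adxCommand }
};
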
@@ -0,0 +1,71 @@
// <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>

namespace Microsoft.Azure.Management.DataFactory.Models
{
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;

/// <summary>
/// A copy activity Azure PostgreSQL sink.
/// </summary>
public partial class AzurePostgreSqlSink : CopySink
{
/// <summary>
/// Initializes a new instance of the AzurePostgreSqlSink class.
/// </summary>
public AzurePostgreSqlSink()
{
CustomInit();
}

/// <summary>
/// Initializes a new instance of the AzurePostgreSqlSink class.
/// </summary>
/// <param name="additionalProperties">Unmatched properties from the
/// message are deserialized to this collection</param>
/// <param name="writeBatchSize">Write batch size. Type: integer (or
/// Expression with resultType integer), minimum: 0.</param>
/// <param name="writeBatchTimeout">Write batch timeout. Type: string
/// (or Expression with resultType string), pattern:
/// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).</param>
/// <param name="sinkRetryCount">Sink retry count. Type: integer (or
/// Expression with resultType integer).</param>
/// <param name="sinkRetryWait">Sink retry wait. Type: string (or
/// Expression with resultType string), pattern:
/// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).</param>
/// <param name="maxConcurrentConnections">The maximum concurrent
/// connection count for the sink data store. Type: integer (or
/// Expression with resultType integer).</param>
/// <param name="preCopyScript">A query to execute before starting the
/// copy. Type: string (or Expression with resultType string).</param>
public AzurePostgreSqlSink(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object writeBatchSize = default(object), object writeBatchTimeout = default(object), object sinkRetryCount = default(object), object sinkRetryWait = default(object), object maxConcurrentConnections = default(object), object preCopyScript = default(object))
: base(additionalProperties, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections)
{
PreCopyScript = preCopyScript;
CustomInit();
}

/// <summary>
/// An initialization method that performs custom operations like setting defaults
/// </summary>
partial void CustomInit();

/// <summary>
/// Gets or sets a query to execute before starting the copy. Type:
/// string (or Expression with resultType string).
/// </summary>
[JsonProperty(PropertyName = "preCopyScript")]
public object PreCopyScript { get; set; }

}
}
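
As a hedged sketch, not taken from this change, the new sink might be used inside a copy activity roughly as follows. AzurePostgreSqlSource, the SQL text, and the activity name are assumptions for illustration, and CopyActivity's generated constructor is assumed to take name, source, and sink.

using Microsoft.Azure.Management.DataFactory.Models;

// Sink with an optional pre-copy script; inherited CopySink settings such as
// WriteBatchSize are loosely typed objects that accept literals or Expressions.
var postgresSink = new AzurePostgreSqlSink
{
    PreCopyScript = "TRUNCATE TABLE staging.orders",
    WriteBatchSize = 10000
};

// Assumed source type; any CopySource would work here.
var postgresSource = new AzurePostgreSqlSource { Query = "SELECT * FROM public.orders" };

var copyOrders = new CopyActivity(name: "CopyOrders", source: postgresSource, sink: postgresSink);
copyOrders.Validate();   // name, source and sink are the required pieces
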
@@ -53,12 +53,21 @@ public AzurePostgreSqlTableDataset()
/// describing the Dataset.</param>
/// <param name="folder">The folder that this Dataset is in. If not
/// specified, Dataset will appear at the root level.</param>
/// <param name="tableName">The table name. Type: string (or Expression
/// with resultType string).</param>
public AzurePostgreSqlTableDataset(LinkedServiceReference linkedServiceName, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string description = default(string), object structure = default(object), object schema = default(object), IDictionary<string, ParameterSpecification> parameters = default(IDictionary<string, ParameterSpecification>), IList<object> annotations = default(IList<object>), DatasetFolder folder = default(DatasetFolder), object tableName = default(object))
/// <param name="tableName">The table name of the Azure PostgreSQL
/// database which includes both schema and table. Type: string (or
/// Expression with resultType string).</param>
/// <param name="table">The table name of the Azure PostgreSQL
/// database. Type: string (or Expression with resultType
/// string).</param>
/// <param name="azurePostgreSqlTableDatasetSchema">The schema name of
/// the Azure PostgreSQL database. Type: string (or Expression with
/// resultType string).</param>
public AzurePostgreSqlTableDataset(LinkedServiceReference linkedServiceName, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string description = default(string), object structure = default(object), object schema = default(object), IDictionary<string, ParameterSpecification> parameters = default(IDictionary<string, ParameterSpecification>), IList<object> annotations = default(IList<object>), DatasetFolder folder = default(DatasetFolder), object tableName = default(object), object table = default(object), object azurePostgreSqlTableDatasetSchema = default(object))
: base(linkedServiceName, additionalProperties, description, structure, schema, parameters, annotations, folder)
{
TableName = tableName;
Table = table;
AzurePostgreSqlTableDatasetSchema = azurePostgreSqlTableDatasetSchema;
CustomInit();
}

@@ -68,12 +77,27 @@ public AzurePostgreSqlTableDataset()
partial void CustomInit();

/// <summary>
/// Gets or sets the table name. Type: string (or Expression with
/// Gets or sets the table name of the Azure PostgreSQL database which
/// includes both schema and table. Type: string (or Expression with
/// resultType string).
/// </summary>
[JsonProperty(PropertyName = "typeProperties.tableName")]
public object TableName { get; set; }

/// <summary>
/// Gets or sets the table name of the Azure PostgreSQL database. Type:
/// string (or Expression with resultType string).
/// </summary>
[JsonProperty(PropertyName = "typeProperties.table")]
public object Table { get; set; }

/// <summary>
/// Gets or sets the schema name of the Azure PostgreSQL database.
/// Type: string (or Expression with resultType string).
/// </summary>
[JsonProperty(PropertyName = "typeProperties.schema")]
public object AzurePostgreSqlTableDatasetSchema { get; set; }

/// <summary>
/// Validate the object.
/// </summary>
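
The practical effect of this hunk is that schema and table can now be set separately instead of packing both into TableName. A small illustrative sketch follows, with an assumed linked service and assumed names.

using Microsoft.Azure.Management.DataFactory.Models;

var dataset = new AzurePostgreSqlTableDataset(
    linkedServiceName: new LinkedServiceReference("AzurePostgreSqlLinkedService"))
{
    // New split properties, serialized as typeProperties.schema / typeProperties.table.
    AzurePostgreSqlTableDatasetSchema = "sales",
    Table = "orders"
    // The combined TableName ("sales.orders") remains available for backward compatibility.
};

dataset.Validate();   // base Dataset validation requires the linked service reference
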
@@ -50,7 +50,7 @@ public CosmosDbMongoDbApiSource()
/// <param name="batchSize">Specifies the number of documents to return
/// in each batch of the response from MongoDB instance. In most cases,
/// modifying the batch size will not affect the user or the
/// application. This propertys main purpose is to avoid hit the
/// application. This property's main purpose is to avoid hit the
/// limitation of response size. Type: integer (or Expression with
/// resultType integer).</param>
public CosmosDbMongoDbApiSource(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object filter = default(object), MongoDbCursorMethodsProperties cursorMethods = default(MongoDbCursorMethodsProperties), object batchSize = default(object))
@@ -86,7 +86,7 @@ public CosmosDbMongoDbApiSource()
/// Gets or sets specifies the number of documents to return in each
/// batch of the response from MongoDB instance. In most cases,
/// modifying the batch size will not affect the user or the
/// application. This propertys main purpose is to avoid hit the
/// application. This property's main purpose is to avoid hit the
/// limitation of response size. Type: integer (or Expression with
/// resultType integer).
/// </summary>
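
Only the XML doc wording changes in this file, but here is a quick, assumed sketch of how batchSize is typically set. The filter JSON and the MongoDbCursorMethodsProperties members shown are assumptions based on the generated model; the same shape applies to MongoDbV2Source later in this diff.

using Microsoft.Azure.Management.DataFactory.Models;

var mongoSource = new CosmosDbMongoDbApiSource
{
    Filter = "{ \"status\": \"active\" }",        // MongoDB query filter document
    CursorMethods = new MongoDbCursorMethodsProperties
    {
        Project = "{ \"_id\": 0, \"name\": 1 }",
        Limit = 500
    },
    BatchSize = 100                               // documents returned per response batch
};
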
@@ -0,0 +1,66 @@
// <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>

namespace Microsoft.Azure.Management.DataFactory.Models
{
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;

/// <summary>
/// A copy activity source for Db2 databases.
/// </summary>
public partial class Db2Source : CopySource
{
/// <summary>
/// Initializes a new instance of the Db2Source class.
/// </summary>
public Db2Source()
{
CustomInit();
}

/// <summary>
/// Initializes a new instance of the Db2Source class.
/// </summary>
/// <param name="additionalProperties">Unmatched properties from the
/// message are deserialized to this collection</param>
/// <param name="sourceRetryCount">Source retry count. Type: integer
/// (or Expression with resultType integer).</param>
/// <param name="sourceRetryWait">Source retry wait. Type: string (or
/// Expression with resultType string), pattern:
/// ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).</param>
/// <param name="maxConcurrentConnections">The maximum concurrent
/// connection count for the source data store. Type: integer (or
/// Expression with resultType integer).</param>
/// <param name="query">Database query. Type: string (or Expression
/// with resultType string).</param>
public Db2Source(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object query = default(object))
: base(additionalProperties, sourceRetryCount, sourceRetryWait, maxConcurrentConnections)
{
Query = query;
CustomInit();
}

/// <summary>
/// An initialization method that performs custom operations like setting defaults
/// </summary>
partial void CustomInit();

/// <summary>
/// Gets or sets database query. Type: string (or Expression with
/// resultType string).
/// </summary>
[JsonProperty(PropertyName = "query")]
public object Query { get; set; }

}
}
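
A brief, hedged sketch, not part of this change, of the new Db2Source feeding a copy activity; the query text, BlobSink, and activity name are assumptions chosen only to show where the type plugs in.

using Microsoft.Azure.Management.DataFactory.Models;

var db2Source = new Db2Source
{
    Query = "SELECT * FROM DB2ADMIN.CUSTOMERS"   // or an Expression with resultType string
};

// Any CopySink would do; BlobSink is used purely as an assumed example.
var copyFromDb2 = new CopyActivity(name: "CopyFromDb2", source: db2Source, sink: new BlobSink());
copyFromDb2.Validate();
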
@@ -50,7 +50,7 @@ public MongoDbV2Source()
/// <param name="batchSize">Specifies the number of documents to return
/// in each batch of the response from MongoDB instance. In most cases,
/// modifying the batch size will not affect the user or the
/// application. This propertys main purpose is to avoid hit the
/// application. This property's main purpose is to avoid hit the
/// limitation of response size. Type: integer (or Expression with
/// resultType integer).</param>
public MongoDbV2Source(IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), object sourceRetryCount = default(object), object sourceRetryWait = default(object), object maxConcurrentConnections = default(object), object filter = default(object), MongoDbCursorMethodsProperties cursorMethods = default(MongoDbCursorMethodsProperties), object batchSize = default(object))
@@ -86,7 +86,7 @@ public MongoDbV2Source()
/// Gets or sets specifies the number of documents to return in each
/// batch of the response from MongoDB instance. In most cases,
/// modifying the batch size will not affect the user or the
/// application. This propertys main purpose is to avoid hit the
/// application. This property's main purpose is to avoid hit the
/// limitation of response size. Type: integer (or Expression with
/// resultType integer).
/// </summary>