Merged
11 changes: 5 additions & 6 deletions eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt
@@ -3,13 +3,12 @@ AutoRest installed successfully.
 Commencing code generation
 Generating CSharp code
 Executing AutoRest command
-cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/main/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk
-Autorest CSharp Version: 2.3.82
-2022-10-25 04:48:50 UTC
+cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/main/specification/datafactory/resource-manager/readme.md --csharp --version=2.0.4421 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Code\azure-sdk-for-net\sdk
+2023-01-12 03:56:29 UTC
 Azure-rest-api-specs repository information
 GitHub fork: Azure
 Branch: main
-Commit: fdd4e5c9b9225698c7f26c75c4b26be5c57e60f8
+Commit: 6e011aae8dd50b5acb4bd5e951f5b930a010bc41
 AutoRest information
-Requested version: v2
-Bootstrapper version: autorest@3.6.1
+Requested version: 2.0.4421
+Bootstrapper version: autorest@2.0.4413
@@ -1,5 +1,12 @@
 # Changelog for the Azure Data Factory V2 .NET SDK
 
+## Version 9.0.0
+### Feature Additions
+### Breaking Changes
+- Added support for credential operations in DataFactory
+- Added exportSettings as a required property on the Snowflake source
+- Added scan folder and Spark config support in the Spark job activity
+
 ## Version 8.0.0
 ### Feature Additions
 ### Breaking Changes
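
For illustration, a minimal sketch (not part of this PR) of what the exportSettings item implies for SDK callers, assuming the existing Microsoft.Azure.Management.DataFactory.Models types SnowflakeSource and SnowflakeExportCopyCommand:

using Microsoft.Azure.Management.DataFactory.Models;

// Assumption: SnowflakeSource exposes an ExportSettings property of type
// SnowflakeExportCopyCommand; from 9.0.0 it should be populated rather
// than left null.
var source = new SnowflakeSource
{
    ExportSettings = new SnowflakeExportCopyCommand()
};

The other 9.0.0 items (credential operations, scan folder) follow the same pattern of new model properties; they are omitted here rather than guessing at type names.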
@@ -64,5 +64,75 @@ public partial class SynapseSparkJobDefinitionActivity : ExecutionActivity
             NumExecutors = numExecutors;
             CustomInit();
         }
+
+        /// <summary>
+        /// Initializes a new instance of the SynapseSparkJobDefinitionActivity
+        /// class.
+        /// </summary>
+        /// <param name="name">Activity name.</param>
+        /// <param name="sparkJob">Synapse spark job reference.</param>
+        /// <param name="additionalProperties">Unmatched properties from the
+        /// message are deserialized to this collection</param>
+        /// <param name="description">Activity description.</param>
+        /// <param name="dependsOn">Activity depends on condition.</param>
+        /// <param name="userProperties">Activity user properties.</param>
+        /// <param name="linkedServiceName">Linked service reference.</param>
+        /// <param name="policy">Activity policy.</param>
+        /// <param name="arguments">User-specified arguments to
+        /// SynapseSparkJobDefinitionActivity.</param>
+        /// <param name="file">The main file used for the job, which will
+        /// override the 'file' of the spark job definition you provide. Type:
+        /// string (or Expression with resultType string).</param>
+        /// <param name="className">The fully-qualified identifier of the main
+        /// class that is in the main definition file, which will override the
+        /// 'className' of the spark job definition you provide. Type: string
+        /// (or Expression with resultType string).</param>
+        /// <param name="files">(Deprecated. Please use pythonCodeReference and
+        /// filesV2) Additional files used for reference in the main definition
+        /// file, which will override the 'files' of the spark job definition
+        /// you provide.</param>
+        /// <param name="pythonCodeReference">Additional python code files used
+        /// for reference in the main definition file, which will override the
+        /// 'pyFiles' of the spark job definition you provide.</param>
+        /// <param name="filesV2">Additional files used for reference in the
+        /// main definition file, which will override the 'jars' and 'files' of
+        /// the spark job definition you provide.</param>
+        /// <param name="targetBigDataPool">The name of the big data pool which
+        /// will be used to execute the spark batch job, which will override
+        /// the 'targetBigDataPool' of the spark job definition you
+        /// provide.</param>
+        /// <param name="executorSize">Number of cores and amount of memory to
+        /// be used for executors allocated in the specified Spark pool for the
+        /// job, which will be used for overriding 'executorCores' and
+        /// 'executorMemory' of the spark job definition you provide. Type:
+        /// string (or Expression with resultType string).</param>
+        /// <param name="conf">Spark configuration properties, which will
+        /// override the 'conf' of the spark job definition you
+        /// provide.</param>
+        /// <param name="driverSize">Number of cores and amount of memory to be
+        /// used for the driver allocated in the specified Spark pool for the
+        /// job, which will be used for overriding 'driverCores' and
+        /// 'driverMemory' of the spark job definition you provide. Type:
+        /// string (or Expression with resultType string).</param>
+        /// <param name="numExecutors">Number of executors to launch for this
+        /// job, which will override the 'numExecutors' of the spark job
+        /// definition you provide.</param>
+        public SynapseSparkJobDefinitionActivity(string name, SynapseSparkJobReference sparkJob, IDictionary<string, object> additionalProperties, string description, IList<ActivityDependency> dependsOn, IList<UserProperty> userProperties, LinkedServiceReference linkedServiceName, ActivityPolicy policy, IList<object> arguments, object file, object className = default(object), IList<object> files = default(IList<object>), IList<object> pythonCodeReference = default(IList<object>), IList<object> filesV2 = default(IList<object>), BigDataPoolParametrizationReference targetBigDataPool = default(BigDataPoolParametrizationReference), object executorSize = default(object), object conf = default(object), object driverSize = default(object), int? numExecutors = default(int?))
+            : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy)
+        {
+            SparkJob = sparkJob;
+            Arguments = arguments;
+            File = file;
+            ClassName = className;
+            Files = files;
+            PythonCodeReference = pythonCodeReference;
+            FilesV2 = filesV2;
+            TargetBigDataPool = targetBigDataPool;
+            ExecutorSize = executorSize;
+            Conf = conf;
+            DriverSize = driverSize;
+            NumExecutors = numExecutors;
+            CustomInit();
+        }
     }
 }
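
As a hedged usage sketch for the new overload above (not part of this PR), an activity might be constructed as follows; the reference names, storage paths, and the object-initializer shapes of SynapseSparkJobReference and LinkedServiceReference are illustrative assumptions, not taken from the diff:

using System.Collections.Generic;
using Microsoft.Azure.Management.DataFactory.Models;

// Hypothetical names and paths, for illustration only.
var activity = new SynapseSparkJobDefinitionActivity(
    name: "RunSparkJob",
    sparkJob: new SynapseSparkJobReference
    {
        // Assumption: Type is a settable string on this reference type.
        Type = "SparkJobDefinitionReference",
        ReferenceName = "MySparkJobDefinition"
    },
    additionalProperties: null,
    description: "Runs a Synapse Spark job definition from a pipeline",
    dependsOn: null,
    userProperties: null,
    linkedServiceName: new LinkedServiceReference { ReferenceName = "MySynapseWorkspace" },
    policy: null,
    arguments: new List<object> { "--input", "abfss://data@myaccount.dfs.core.windows.net/in" },
    file: "abfss://code@myaccount.dfs.core.windows.net/jobs/main.py",
    // New in this release: 'conf' overrides the Spark configuration of the
    // referenced job definition.
    conf: new Dictionary<string, object> { ["spark.dynamicAllocation.enabled"] = "false" },
    numExecutors: 2);

Passing conf as a dictionary mirrors the 'conf' object of the underlying Spark job definition; since the parameter is typed object, an Expression could be supplied instead.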