Skip to content

Commit b8d54f0

Browse files
authored
[DataFactory]Added new features into 9.0.0 (#33440)
1 parent acf55a7 commit b8d54f0

15 files changed

+2000
-18
lines changed

eng/mgmt/mgmtmetadata/datafactory_resource-manager.txt

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,12 @@ AutoRest installed successfully.
33
Commencing code generation
44
Generating CSharp code
55
Executing AutoRest command
6-
cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/main/specification/datafactory/resource-manager/readme.md --csharp --version=v2 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Projects\azure-sdk-for-net\sdk
7-
Autorest CSharp Version: 2.3.82
8-
2022-10-25 04:48:50 UTC
6+
cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/main/specification/datafactory/resource-manager/readme.md --csharp --version=2.0.4421 --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=D:\Code\azure-sdk-for-net\sdk
7+
2023-01-12 03:56:29 UTC
98
Azure-rest-api-specs repository information
109
GitHub fork: Azure
1110
Branch: main
12-
Commit: fdd4e5c9b9225698c7f26c75c4b26be5c57e60f8
11+
Commit: 6e011aae8dd50b5acb4bd5e951f5b930a010bc41
1312
AutoRest information
14-
Requested version: v2
15-
Bootstrapper version: autorest@3.6.1
13+
Requested version: 2.0.4421
14+
Bootstrapper version: autorest@2.0.4413

sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/CHANGELOG.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,12 @@
11
# Changelog for the Azure Data Factory V2 .NET SDK
22

3+
## Version 9.0.0
4+
### Feature Additions
5+
### Breaking Changes
6+
- Added support for credential operations in Data Factory
7+
- Made exportSettings a required property on the Snowflake source
8+
- Added scan-folder and Spark configuration support to the Spark job definition activity
9+
310
## Version 8.0.0
411
### Feature Additions
512
### Breaking Changes

sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Customizations/SynapseSparkJobDefinitionActivity.cs

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,5 +64,75 @@ public partial class SynapseSparkJobDefinitionActivity : ExecutionActivity
6464
NumExecutors = numExecutors;
6565
CustomInit();
6666
}
67+
68+
/// <summary>
69+
/// Initializes a new instance of the SynapseSparkJobDefinitionActivity
70+
/// class.
71+
/// </summary>
72+
/// <param name="name">Activity name.</param>
73+
/// <param name="sparkJob">Synapse spark job reference.</param>
74+
/// <param name="additionalProperties">Unmatched properties from the
75+
/// message are deserialized to this collection</param>
76+
/// <param name="description">Activity description.</param>
77+
/// <param name="dependsOn">Activity depends on condition.</param>
78+
/// <param name="userProperties">Activity user properties.</param>
79+
/// <param name="linkedServiceName">Linked service reference.</param>
80+
/// <param name="policy">Activity policy.</param>
81+
/// <param name="arguments">User specified arguments to
82+
/// SynapseSparkJobDefinitionActivity.</param>
83+
/// <param name="file">The main file used for the job, which will
84+
/// override the 'file' of the spark job definition you provide. Type:
85+
/// string (or Expression with resultType string).</param>
86+
/// <param name="className">The fully-qualified identifier of the main
87+
/// class that is in the main definition file, which will override the
88+
/// 'className' of the spark job definition you provide. Type: string
89+
/// (or Expression with resultType string).</param>
90+
/// <param name="files">(Deprecated. Please use pythonCodeReference and
91+
/// filesV2) Additional files used for reference in the main definition
92+
/// file, which will override the 'files' of the spark job definition
93+
/// you provide.</param>
94+
/// <param name="pythonCodeReference">Additional python code files used
95+
/// for reference in the main definition file, which will override the
96+
/// 'pyFiles' of the spark job definition you provide.</param>
97+
/// <param name="filesV2">Additional files used for reference in the
98+
/// main definition file, which will override the 'jars' and 'files' of
99+
/// the spark job definition you provide.</param>
100+
/// <param name="targetBigDataPool">The name of the big data pool which
101+
/// will be used to execute the spark batch job, which will override
102+
/// the 'targetBigDataPool' of the spark job definition you
103+
/// provide.</param>
104+
/// <param name="executorSize">Number of core and memory to be used for
105+
/// executors allocated in the specified Spark pool for the job, which
106+
/// will be used for overriding 'executorCores' and 'executorMemory' of
107+
/// the spark job definition you provide. Type: string (or Expression
108+
/// with resultType string).</param>
109+
/// <param name="conf">Spark configuration properties, which will
110+
/// override the 'conf' of the spark job definition you
111+
/// provide.</param>
112+
/// <param name="driverSize">Number of core and memory to be used for
113+
/// driver allocated in the specified Spark pool for the job, which
114+
/// will be used for overriding 'driverCores' and 'driverMemory' of the
115+
/// spark job definition you provide. Type: string (or Expression with
116+
/// resultType string).</param>
117+
/// <param name="numExecutors">Number of executors to launch for this
118+
/// job, which will override the 'numExecutors' of the spark job
119+
/// definition you provide.</param>
120+
public SynapseSparkJobDefinitionActivity(string name, SynapseSparkJobReference sparkJob, IDictionary<string, object> additionalProperties, string description, IList<ActivityDependency> dependsOn, IList<UserProperty> userProperties, LinkedServiceReference linkedServiceName, ActivityPolicy policy, IList<object> arguments, object file, object className = default(object), IList<object> files = default(IList<object>), IList<object> pythonCodeReference = default(IList<object>), IList<object> filesV2 = default(IList<object>), BigDataPoolParametrizationReference targetBigDataPool = default(BigDataPoolParametrizationReference), object executorSize = default(object), object conf = default(object), object driverSize = default(object), int? numExecutors = default(int?))
121+
: base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy)
122+
{
123+
SparkJob = sparkJob;
124+
Arguments = arguments;
125+
File = file;
126+
ClassName = className;
127+
Files = files;
128+
PythonCodeReference = pythonCodeReference;
129+
FilesV2 = filesV2;
130+
TargetBigDataPool = targetBigDataPool;
131+
ExecutorSize = executorSize;
132+
Conf = conf;
133+
DriverSize = driverSize;
134+
NumExecutors = numExecutors;
135+
CustomInit();
136+
}
67137
}
68138
}

0 commit comments

Comments
 (0)