From 977d83020cba26cfae3626731fefb9de42a30709 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Thu, 14 Jul 2022 05:58:30 +0000 Subject: [PATCH] CodeGen from PR 19769 in Azure/azure-rest-api-specs Merge eea5dd2657072068055a84bb77a51d86e59b3e09 into 1ba198d96e2c1dbd4dd392a5d2a5a53d5160d55f --- .../CHANGELOG.md | 4 +- .../README.md | 2 +- .../datafactory/DataFactoryManager.java | 2 +- ...eArtifactsLinkedServiceTypeProperties.java | 89 +++++ .../HttpLinkedServiceTypeProperties.java | 6 +- ...SynapseNotebookActivityTypeProperties.java | 253 +++++++++++++ ...SynapseSparkJobActivityTypeProperties.java | 336 +++++++++++++++++ .../AzureSynapseArtifactsLinkedService.java | 134 +++++++ .../BigDataPoolParametrizationReference.java | 90 +++++ .../models/BigDataPoolReferenceType.java | 35 ++ .../datafactory/models/ExecutionActivity.java | 4 +- .../datafactory/models/HttpLinkedService.java | 4 +- .../datafactory/models/LinkedService.java | 3 +- .../datafactory/models/NotebookParameter.java | 73 ++++ .../models/NotebookParameterType.java | 44 +++ .../models/NotebookReferenceType.java | 35 ++ .../models/SparkJobReferenceType.java | 36 ++ .../models/SynapseNotebookActivity.java | 270 ++++++++++++++ .../models/SynapseNotebookReference.java | 87 +++++ .../SynapseSparkJobDefinitionActivity.java | 349 ++++++++++++++++++ .../models/SynapseSparkJobReference.java | 86 +++++ .../WebLinkedServiceTypeProperties.java | 6 +- 22 files changed, 1935 insertions(+), 13 deletions(-) create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java create mode 100644 
sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java create mode 100644 sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md index 44c96fc2f830..4a06df3176aa 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md +++ b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md @@ -1,6 +1,8 @@ # Release History -## 1.0.0-beta.17 (Unreleased) +## 1.0.0-beta.1 (2022-07-14) + +- Azure Resource Manager DataFactory client library for Java. This package contains Microsoft Azure SDK for DataFactory Management SDK. The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. Package tag package-2018-06. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt). ### Features Added diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/README.md b/sdk/datafactory/azure-resourcemanager-datafactory/README.md index aed548ba7545..4b169c2f5aa6 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/README.md +++ b/sdk/datafactory/azure-resourcemanager-datafactory/README.md @@ -32,7 +32,7 @@ Various documentation is available to help you get started <dependency> <groupId>com.azure.resourcemanager</groupId> <artifactId>azure-resourcemanager-datafactory</artifactId> - <version>1.0.0-beta.16</version> + <version>1.0.0-beta.17</version> </dependency> ``` [//]: # ({x-version-update-end}) diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java index 29933c680508..22ca53205881 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/DataFactoryManager.java @@ -286,7 +286,7 @@ public DataFactoryManager authenticate(TokenCredential credential, AzureProfile .append("-") .append("com.azure.resourcemanager.datafactory") .append("/") - .append("1.0.0-beta.16"); + .append("1.0.0-beta.1"); if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) { userAgentBuilder .append(" (") diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java new file mode 100644 index 000000000000..a702b7a1150c --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureSynapseArtifactsLinkedServiceTypeProperties.java @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator.
+ +package com.azure.resourcemanager.datafactory.fluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Azure Synapse Analytics (Artifacts) linked service properties. */ +@Fluent +public final class AzureSynapseArtifactsLinkedServiceTypeProperties { + /* + * https://.dev.azuresynapse.net, Azure Synapse Analytics + * workspace URL. Type: string (or Expression with resultType string). + */ + @JsonProperty(value = "endpoint", required = true) + private Object endpoint; + + /* + * Required to specify MSI, if using system assigned managed identity as + * authentication method. Type: string (or Expression with resultType + * string). + */ + @JsonProperty(value = "authentication") + private Object authentication; + + /** + * Get the endpoint property: https://<workspacename>.dev.azuresynapse.net, Azure Synapse Analytics workspace + * URL. Type: string (or Expression with resultType string). + * + * @return the endpoint value. + */ + public Object endpoint() { + return this.endpoint; + } + + /** + * Set the endpoint property: https://<workspacename>.dev.azuresynapse.net, Azure Synapse Analytics workspace + * URL. Type: string (or Expression with resultType string). + * + * @param endpoint the endpoint value to set. + * @return the AzureSynapseArtifactsLinkedServiceTypeProperties object itself. + */ + public AzureSynapseArtifactsLinkedServiceTypeProperties withEndpoint(Object endpoint) { + this.endpoint = endpoint; + return this; + } + + /** + * Get the authentication property: Required to specify MSI, if using system assigned managed identity as + * authentication method. Type: string (or Expression with resultType string). + * + * @return the authentication value. + */ + public Object authentication() { + return this.authentication; + } + + /** + * Set the authentication property: Required to specify MSI, if using system assigned managed identity as + * authentication method. Type: string (or Expression with resultType string). + * + * @param authentication the authentication value to set. + * @return the AzureSynapseArtifactsLinkedServiceTypeProperties object itself. + */ + public AzureSynapseArtifactsLinkedServiceTypeProperties withAuthentication(Object authentication) { + this.authentication = authentication; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + public void validate() { + if (endpoint() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property endpoint in model" + + " AzureSynapseArtifactsLinkedServiceTypeProperties")); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(AzureSynapseArtifactsLinkedServiceTypeProperties.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java index 799291ad1cb7..7080687e2972 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/HttpLinkedServiceTypeProperties.java @@ -14,7 +14,7 @@ @Fluent public final class HttpLinkedServiceTypeProperties { /* - * The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: + * The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: * string (or Expression with resultType string). */ @JsonProperty(value = "url", required = true) @@ -82,7 +82,7 @@ public final class HttpLinkedServiceTypeProperties { private Object enableServerCertificateValidation; /** - * Get the url property: The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or + * Get the url property: The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or * Expression with resultType string). * * @return the url value. @@ -92,7 +92,7 @@ public Object url() { } /** - * Set the url property: The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or + * Set the url property: The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or * Expression with resultType string). * * @param url the url value to set. diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java new file mode 100644 index 000000000000..b25d685ed913 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseNotebookActivityTypeProperties.java @@ -0,0 +1,253 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.fluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference; +import com.azure.resourcemanager.datafactory.models.NotebookParameter; +import com.azure.resourcemanager.datafactory.models.SynapseNotebookReference; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; + +/** Execute Synapse notebook activity properties. 
*/ +@Fluent +public final class SynapseNotebookActivityTypeProperties { + /* + * Synapse notebook reference. + */ + @JsonProperty(value = "notebook", required = true) + private SynapseNotebookReference notebook; + + /* + * The name of the big data pool which will be used to execute the + * notebook. + */ + @JsonProperty(value = "sparkPool") + private BigDataPoolParametrizationReference sparkPool; + + /* + * Notebook parameters. + */ + @JsonProperty(value = "parameters") + @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) + private Map parameters; + + /* + * Number of core and memory to be used for executors allocated in the + * specified Spark pool for the session, which will be used for overriding + * 'executorCores' and 'executorMemory' of the notebook you provide. Type: + * string (or Expression with resultType string). + */ + @JsonProperty(value = "executorSize") + private Object executorSize; + + /* + * Spark configuration properties, which will override the 'conf' of the + * notebook you provide. + */ + @JsonProperty(value = "conf") + private Object conf; + + /* + * Number of core and memory to be used for driver allocated in the + * specified Spark pool for the session, which will be used for overriding + * 'driverCores' and 'driverMemory' of the notebook you provide. Type: + * string (or Expression with resultType string). + */ + @JsonProperty(value = "driverSize") + private Object driverSize; + + /* + * Number of executors to launch for this session, which will override the + * 'numExecutors' of the notebook you provide. + */ + @JsonProperty(value = "numExecutors") + private Integer numExecutors; + + /** + * Get the notebook property: Synapse notebook reference. + * + * @return the notebook value. + */ + public SynapseNotebookReference notebook() { + return this.notebook; + } + + /** + * Set the notebook property: Synapse notebook reference. + * + * @param notebook the notebook value to set. + * @return the SynapseNotebookActivityTypeProperties object itself. + */ + public SynapseNotebookActivityTypeProperties withNotebook(SynapseNotebookReference notebook) { + this.notebook = notebook; + return this; + } + + /** + * Get the sparkPool property: The name of the big data pool which will be used to execute the notebook. + * + * @return the sparkPool value. + */ + public BigDataPoolParametrizationReference sparkPool() { + return this.sparkPool; + } + + /** + * Set the sparkPool property: The name of the big data pool which will be used to execute the notebook. + * + * @param sparkPool the sparkPool value to set. + * @return the SynapseNotebookActivityTypeProperties object itself. + */ + public SynapseNotebookActivityTypeProperties withSparkPool(BigDataPoolParametrizationReference sparkPool) { + this.sparkPool = sparkPool; + return this; + } + + /** + * Get the parameters property: Notebook parameters. + * + * @return the parameters value. + */ + public Map parameters() { + return this.parameters; + } + + /** + * Set the parameters property: Notebook parameters. + * + * @param parameters the parameters value to set. + * @return the SynapseNotebookActivityTypeProperties object itself. 
+ */ + public SynapseNotebookActivityTypeProperties withParameters(Map parameters) { + this.parameters = parameters; + return this; + } + + /** + * Get the executorSize property: Number of core and memory to be used for executors allocated in the specified + * Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the + * notebook you provide. Type: string (or Expression with resultType string). + * + * @return the executorSize value. + */ + public Object executorSize() { + return this.executorSize; + } + + /** + * Set the executorSize property: Number of core and memory to be used for executors allocated in the specified + * Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the + * notebook you provide. Type: string (or Expression with resultType string). + * + * @param executorSize the executorSize value to set. + * @return the SynapseNotebookActivityTypeProperties object itself. + */ + public SynapseNotebookActivityTypeProperties withExecutorSize(Object executorSize) { + this.executorSize = executorSize; + return this; + } + + /** + * Get the conf property: Spark configuration properties, which will override the 'conf' of the notebook you + * provide. + * + * @return the conf value. + */ + public Object conf() { + return this.conf; + } + + /** + * Set the conf property: Spark configuration properties, which will override the 'conf' of the notebook you + * provide. + * + * @param conf the conf value to set. + * @return the SynapseNotebookActivityTypeProperties object itself. + */ + public SynapseNotebookActivityTypeProperties withConf(Object conf) { + this.conf = conf; + return this; + } + + /** + * Get the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark + * pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you + * provide. Type: string (or Expression with resultType string). + * + * @return the driverSize value. + */ + public Object driverSize() { + return this.driverSize; + } + + /** + * Set the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark + * pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you + * provide. Type: string (or Expression with resultType string). + * + * @param driverSize the driverSize value to set. + * @return the SynapseNotebookActivityTypeProperties object itself. + */ + public SynapseNotebookActivityTypeProperties withDriverSize(Object driverSize) { + this.driverSize = driverSize; + return this; + } + + /** + * Get the numExecutors property: Number of executors to launch for this session, which will override the + * 'numExecutors' of the notebook you provide. + * + * @return the numExecutors value. + */ + public Integer numExecutors() { + return this.numExecutors; + } + + /** + * Set the numExecutors property: Number of executors to launch for this session, which will override the + * 'numExecutors' of the notebook you provide. + * + * @param numExecutors the numExecutors value to set. + * @return the SynapseNotebookActivityTypeProperties object itself. + */ + public SynapseNotebookActivityTypeProperties withNumExecutors(Integer numExecutors) { + this.numExecutors = numExecutors; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + public void validate() { + if (notebook() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property notebook in model SynapseNotebookActivityTypeProperties")); + } else { + notebook().validate(); + } + if (sparkPool() != null) { + sparkPool().validate(); + } + if (parameters() != null) { + parameters() + .values() + .forEach( + e -> { + if (e != null) { + e.validate(); + } + }); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(SynapseNotebookActivityTypeProperties.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java new file mode 100644 index 000000000000..dc062c6f302a --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/SynapseSparkJobActivityTypeProperties.java @@ -0,0 +1,336 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.fluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference; +import com.azure.resourcemanager.datafactory.models.SynapseSparkJobReference; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** Execute spark job activity properties. */ +@Fluent +public final class SynapseSparkJobActivityTypeProperties { + /* + * Synapse spark job reference. + */ + @JsonProperty(value = "sparkJob", required = true) + private SynapseSparkJobReference sparkJob; + + /* + * User specified arguments to SynapseSparkJobDefinitionActivity. + */ + @JsonProperty(value = "args") + private List arguments; + + /* + * The main file used for the job, which will override the 'file' of the + * spark job definition you provide. Type: string (or Expression with + * resultType string). + */ + @JsonProperty(value = "file") + private Object file; + + /* + * The fully-qualified identifier or the main class that is in the main + * definition file, which will override the 'className' of the spark job + * definition you provide. Type: string (or Expression with resultType + * string). + */ + @JsonProperty(value = "className") + private Object className; + + /* + * Additional files used for reference in the main definition file, which + * will override the 'files' of the spark job definition you provide. + */ + @JsonProperty(value = "files") + private List files; + + /* + * The name of the big data pool which will be used to execute the spark + * batch job, which will override the 'targetBigDataPool' of the spark job + * definition you provide. + */ + @JsonProperty(value = "targetBigDataPool") + private BigDataPoolParametrizationReference targetBigDataPool; + + /* + * Number of core and memory to be used for executors allocated in the + * specified Spark pool for the job, which will be used for overriding + * 'executorCores' and 'executorMemory' of the spark job definition you + * provide. Type: string (or Expression with resultType string). 
+ */ + @JsonProperty(value = "executorSize") + private Object executorSize; + + /* + * Spark configuration properties, which will override the 'conf' of the + * spark job definition you provide. + */ + @JsonProperty(value = "conf") + private Object conf; + + /* + * Number of core and memory to be used for driver allocated in the + * specified Spark pool for the job, which will be used for overriding + * 'driverCores' and 'driverMemory' of the spark job definition you + * provide. Type: string (or Expression with resultType string). + */ + @JsonProperty(value = "driverSize") + private Object driverSize; + + /* + * Number of executors to launch for this job, which will override the + * 'numExecutors' of the spark job definition you provide. + */ + @JsonProperty(value = "numExecutors") + private Integer numExecutors; + + /** + * Get the sparkJob property: Synapse spark job reference. + * + * @return the sparkJob value. + */ + public SynapseSparkJobReference sparkJob() { + return this.sparkJob; + } + + /** + * Set the sparkJob property: Synapse spark job reference. + * + * @param sparkJob the sparkJob value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withSparkJob(SynapseSparkJobReference sparkJob) { + this.sparkJob = sparkJob; + return this; + } + + /** + * Get the arguments property: User specified arguments to SynapseSparkJobDefinitionActivity. + * + * @return the arguments value. + */ + public List arguments() { + return this.arguments; + } + + /** + * Set the arguments property: User specified arguments to SynapseSparkJobDefinitionActivity. + * + * @param arguments the arguments value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withArguments(List arguments) { + this.arguments = arguments; + return this; + } + + /** + * Get the file property: The main file used for the job, which will override the 'file' of the spark job definition + * you provide. Type: string (or Expression with resultType string). + * + * @return the file value. + */ + public Object file() { + return this.file; + } + + /** + * Set the file property: The main file used for the job, which will override the 'file' of the spark job definition + * you provide. Type: string (or Expression with resultType string). + * + * @param file the file value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withFile(Object file) { + this.file = file; + return this; + } + + /** + * Get the className property: The fully-qualified identifier or the main class that is in the main definition file, + * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with + * resultType string). + * + * @return the className value. + */ + public Object className() { + return this.className; + } + + /** + * Set the className property: The fully-qualified identifier or the main class that is in the main definition file, + * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with + * resultType string). + * + * @param className the className value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. 
+ */ + public SynapseSparkJobActivityTypeProperties withClassName(Object className) { + this.className = className; + return this; + } + + /** + * Get the files property: Additional files used for reference in the main definition file, which will override the + * 'files' of the spark job definition you provide. + * + * @return the files value. + */ + public List files() { + return this.files; + } + + /** + * Set the files property: Additional files used for reference in the main definition file, which will override the + * 'files' of the spark job definition you provide. + * + * @param files the files value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withFiles(List files) { + this.files = files; + return this; + } + + /** + * Get the targetBigDataPool property: The name of the big data pool which will be used to execute the spark batch + * job, which will override the 'targetBigDataPool' of the spark job definition you provide. + * + * @return the targetBigDataPool value. + */ + public BigDataPoolParametrizationReference targetBigDataPool() { + return this.targetBigDataPool; + } + + /** + * Set the targetBigDataPool property: The name of the big data pool which will be used to execute the spark batch + * job, which will override the 'targetBigDataPool' of the spark job definition you provide. + * + * @param targetBigDataPool the targetBigDataPool value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withTargetBigDataPool( + BigDataPoolParametrizationReference targetBigDataPool) { + this.targetBigDataPool = targetBigDataPool; + return this; + } + + /** + * Get the executorSize property: Number of core and memory to be used for executors allocated in the specified + * Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job + * definition you provide. Type: string (or Expression with resultType string). + * + * @return the executorSize value. + */ + public Object executorSize() { + return this.executorSize; + } + + /** + * Set the executorSize property: Number of core and memory to be used for executors allocated in the specified + * Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job + * definition you provide. Type: string (or Expression with resultType string). + * + * @param executorSize the executorSize value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withExecutorSize(Object executorSize) { + this.executorSize = executorSize; + return this; + } + + /** + * Get the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. + * + * @return the conf value. + */ + public Object conf() { + return this.conf; + } + + /** + * Set the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. + * + * @param conf the conf value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. 
+ */ + public SynapseSparkJobActivityTypeProperties withConf(Object conf) { + this.conf = conf; + return this; + } + + /** + * Get the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark + * pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition + * you provide. Type: string (or Expression with resultType string). + * + * @return the driverSize value. + */ + public Object driverSize() { + return this.driverSize; + } + + /** + * Set the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark + * pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition + * you provide. Type: string (or Expression with resultType string). + * + * @param driverSize the driverSize value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withDriverSize(Object driverSize) { + this.driverSize = driverSize; + return this; + } + + /** + * Get the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. + * + * @return the numExecutors value. + */ + public Integer numExecutors() { + return this.numExecutors; + } + + /** + * Set the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. + * + * @param numExecutors the numExecutors value to set. + * @return the SynapseSparkJobActivityTypeProperties object itself. + */ + public SynapseSparkJobActivityTypeProperties withNumExecutors(Integer numExecutors) { + this.numExecutors = numExecutors; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + if (sparkJob() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property sparkJob in model SynapseSparkJobActivityTypeProperties")); + } else { + sparkJob().validate(); + } + if (targetBigDataPool() != null) { + targetBigDataPool().validate(); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobActivityTypeProperties.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java new file mode 100644 index 000000000000..4c7794c05502 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/AzureSynapseArtifactsLinkedService.java @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
+ +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.datafactory.fluent.models.AzureSynapseArtifactsLinkedServiceTypeProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import com.fasterxml.jackson.annotation.JsonTypeName; +import java.util.List; +import java.util.Map; + +/** Azure Synapse Analytics (Artifacts) linked service. */ +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeName("AzureSynapseArtifacts") +@Fluent +public final class AzureSynapseArtifactsLinkedService extends LinkedService { + /* + * Azure Synapse Analytics (Artifacts) linked service properties. + */ + @JsonProperty(value = "typeProperties", required = true) + private AzureSynapseArtifactsLinkedServiceTypeProperties innerTypeProperties = + new AzureSynapseArtifactsLinkedServiceTypeProperties(); + + /** + * Get the innerTypeProperties property: Azure Synapse Analytics (Artifacts) linked service properties. + * + * @return the innerTypeProperties value. + */ + private AzureSynapseArtifactsLinkedServiceTypeProperties innerTypeProperties() { + return this.innerTypeProperties; + } + + /** {@inheritDoc} */ + @Override + public AzureSynapseArtifactsLinkedService withConnectVia(IntegrationRuntimeReference connectVia) { + super.withConnectVia(connectVia); + return this; + } + + /** {@inheritDoc} */ + @Override + public AzureSynapseArtifactsLinkedService withDescription(String description) { + super.withDescription(description); + return this; + } + + /** {@inheritDoc} */ + @Override + public AzureSynapseArtifactsLinkedService withParameters(Map parameters) { + super.withParameters(parameters); + return this; + } + + /** {@inheritDoc} */ + @Override + public AzureSynapseArtifactsLinkedService withAnnotations(List annotations) { + super.withAnnotations(annotations); + return this; + } + + /** + * Get the endpoint property: https://<workspacename>.dev.azuresynapse.net, Azure Synapse Analytics workspace + * URL. Type: string (or Expression with resultType string). + * + * @return the endpoint value. + */ + public Object endpoint() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().endpoint(); + } + + /** + * Set the endpoint property: https://<workspacename>.dev.azuresynapse.net, Azure Synapse Analytics workspace + * URL. Type: string (or Expression with resultType string). + * + * @param endpoint the endpoint value to set. + * @return the AzureSynapseArtifactsLinkedService object itself. + */ + public AzureSynapseArtifactsLinkedService withEndpoint(Object endpoint) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new AzureSynapseArtifactsLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withEndpoint(endpoint); + return this; + } + + /** + * Get the authentication property: Required to specify MSI, if using system assigned managed identity as + * authentication method. Type: string (or Expression with resultType string). + * + * @return the authentication value. + */ + public Object authentication() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().authentication(); + } + + /** + * Set the authentication property: Required to specify MSI, if using system assigned managed identity as + * authentication method. Type: string (or Expression with resultType string). 
+ * + * @param authentication the authentication value to set. + * @return the AzureSynapseArtifactsLinkedService object itself. + */ + public AzureSynapseArtifactsLinkedService withAuthentication(Object authentication) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new AzureSynapseArtifactsLinkedServiceTypeProperties(); + } + this.innerTypeProperties().withAuthentication(authentication); + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + super.validate(); + if (innerTypeProperties() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property innerTypeProperties in model AzureSynapseArtifactsLinkedService")); + } else { + innerTypeProperties().validate(); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(AzureSynapseArtifactsLinkedService.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java new file mode 100644 index 000000000000..aadd67f0df73 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolParametrizationReference.java @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Big data pool reference type. */ +@Fluent +public final class BigDataPoolParametrizationReference { + /* + * Big data pool reference type. + */ + @JsonProperty(value = "type", required = true) + private BigDataPoolReferenceType type; + + /* + * Reference big data pool name. Type: string (or Expression with + * resultType string). + */ + @JsonProperty(value = "referenceName", required = true) + private Object referenceName; + + /** + * Get the type property: Big data pool reference type. + * + * @return the type value. + */ + public BigDataPoolReferenceType type() { + return this.type; + } + + /** + * Set the type property: Big data pool reference type. + * + * @param type the type value to set. + * @return the BigDataPoolParametrizationReference object itself. + */ + public BigDataPoolParametrizationReference withType(BigDataPoolReferenceType type) { + this.type = type; + return this; + } + + /** + * Get the referenceName property: Reference big data pool name. Type: string (or Expression with resultType + * string). + * + * @return the referenceName value. + */ + public Object referenceName() { + return this.referenceName; + } + + /** + * Set the referenceName property: Reference big data pool name. Type: string (or Expression with resultType + * string). + * + * @param referenceName the referenceName value to set. + * @return the BigDataPoolParametrizationReference object itself. + */ + public BigDataPoolParametrizationReference withReferenceName(Object referenceName) { + this.referenceName = referenceName; + return this; + } + + /** + * Validates the instance. 
+ * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + if (type() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property type in model BigDataPoolParametrizationReference")); + } + if (referenceName() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property referenceName in model BigDataPoolParametrizationReference")); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(BigDataPoolParametrizationReference.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java new file mode 100644 index 000000000000..c76f13e25633 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/BigDataPoolReferenceType.java @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** Defines values for BigDataPoolReferenceType. */ +public final class BigDataPoolReferenceType extends ExpandableStringEnum { + /** Static value BigDataPoolReference for BigDataPoolReferenceType. */ + public static final BigDataPoolReferenceType BIG_DATA_POOL_REFERENCE = fromString("BigDataPoolReference"); + + /** + * Creates or finds a BigDataPoolReferenceType from its string representation. + * + * @param name a name to look for. + * @return the corresponding BigDataPoolReferenceType. + */ + @JsonCreator + public static BigDataPoolReferenceType fromString(String name) { + return fromString(name, BigDataPoolReferenceType.class); + } + + /** + * Gets known BigDataPoolReferenceType values. + * + * @return known BigDataPoolReferenceType values. 
+ */ + public static Collection values() { + return values(BigDataPoolReferenceType.class); + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java index a47bdd5f427c..46f61acdf816 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/ExecutionActivity.java @@ -42,7 +42,9 @@ @JsonSubTypes.Type(name = "DatabricksSparkPython", value = DatabricksSparkPythonActivity.class), @JsonSubTypes.Type(name = "AzureFunctionActivity", value = AzureFunctionActivity.class), @JsonSubTypes.Type(name = "ExecuteDataFlow", value = ExecuteDataFlowActivity.class), - @JsonSubTypes.Type(name = "Script", value = ScriptActivity.class) + @JsonSubTypes.Type(name = "Script", value = ScriptActivity.class), + @JsonSubTypes.Type(name = "SynapseNotebook", value = SynapseNotebookActivity.class), + @JsonSubTypes.Type(name = "SparkJob", value = SynapseSparkJobDefinitionActivity.class) }) @Fluent public class ExecutionActivity extends Activity { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java index 853292300b8d..46489bc864dc 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/HttpLinkedService.java @@ -62,7 +62,7 @@ public HttpLinkedService withAnnotations(List annotations) { } /** - * Get the url property: The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or + * Get the url property: The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or * Expression with resultType string). * * @return the url value. @@ -72,7 +72,7 @@ public Object url() { } /** - * Set the url property: The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or + * Set the url property: The base URL of the HTTP endpoint, e.g. https://www.microsoft.com. Type: string (or * Expression with resultType string). * * @param url the url value to set. 
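With the "SynapseNotebook" and "SparkJob" discriminators registered on ExecutionActivity above (and "AzureSynapseArtifacts" registered on LinkedService in the next hunk), the new models compose through the same fluent setters as the existing activities and linked services. A minimal sketch in Java, assuming a hypothetical workspace endpoint and Spark pool name, neither of which comes from this patch:

    // Sketch only: the endpoint and pool name below are placeholders.
    AzureSynapseArtifactsLinkedService synapseLinkedService =
        new AzureSynapseArtifactsLinkedService()
            .withEndpoint("https://myworkspace.dev.azuresynapse.net")
            .withAuthentication("MSI"); // system-assigned managed identity
    synapseLinkedService.validate(); // throws IllegalArgumentException if 'endpoint' is unset

    BigDataPoolParametrizationReference sparkPool =
        new BigDataPoolParametrizationReference()
            .withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE)
            .withReferenceName("mySparkPool");
    sparkPool.validate(); // both 'type' and 'referenceName' are required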
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java index 1dd7660ee5a2..a5077c0fb398 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/LinkedService.java @@ -138,7 +138,8 @@ @JsonSubTypes.Type(name = "AzureDataExplorer", value = AzureDataExplorerLinkedService.class), @JsonSubTypes.Type(name = "AzureFunction", value = AzureFunctionLinkedService.class), @JsonSubTypes.Type(name = "Snowflake", value = SnowflakeLinkedService.class), - @JsonSubTypes.Type(name = "SharePointOnlineList", value = SharePointOnlineListLinkedService.class) + @JsonSubTypes.Type(name = "SharePointOnlineList", value = SharePointOnlineListLinkedService.class), + @JsonSubTypes.Type(name = "AzureSynapseArtifacts", value = AzureSynapseArtifactsLinkedService.class) }) @Fluent public class LinkedService { diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java new file mode 100644 index 000000000000..dc81aaddede3 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameter.java @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Notebook parameter. */ +@Fluent +public final class NotebookParameter { + /* + * Notebook parameter value. Type: string (or Expression with resultType + * string). + */ + @JsonProperty(value = "value") + private Object value; + + /* + * Notebook parameter type. + */ + @JsonProperty(value = "type") + private NotebookParameterType type; + + /** + * Get the value property: Notebook parameter value. Type: string (or Expression with resultType string). + * + * @return the value value. + */ + public Object value() { + return this.value; + } + + /** + * Set the value property: Notebook parameter value. Type: string (or Expression with resultType string). + * + * @param value the value value to set. + * @return the NotebookParameter object itself. + */ + public NotebookParameter withValue(Object value) { + this.value = value; + return this; + } + + /** + * Get the type property: Notebook parameter type. + * + * @return the type value. + */ + public NotebookParameterType type() { + return this.type; + } + + /** + * Set the type property: Notebook parameter type. + * + * @param type the type value to set. + * @return the NotebookParameter object itself. + */ + public NotebookParameter withType(NotebookParameterType type) { + this.type = type; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + public void validate() { + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java new file mode 100644 index 000000000000..c8779310bb0f --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookParameterType.java @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** Defines values for NotebookParameterType. */ +public final class NotebookParameterType extends ExpandableStringEnum { + /** Static value string for NotebookParameterType. */ + public static final NotebookParameterType STRING = fromString("string"); + + /** Static value int for NotebookParameterType. */ + public static final NotebookParameterType INT = fromString("int"); + + /** Static value float for NotebookParameterType. */ + public static final NotebookParameterType FLOAT = fromString("float"); + + /** Static value bool for NotebookParameterType. */ + public static final NotebookParameterType BOOL = fromString("bool"); + + /** + * Creates or finds a NotebookParameterType from its string representation. + * + * @param name a name to look for. + * @return the corresponding NotebookParameterType. + */ + @JsonCreator + public static NotebookParameterType fromString(String name) { + return fromString(name, NotebookParameterType.class); + } + + /** + * Gets known NotebookParameterType values. + * + * @return known NotebookParameterType values. + */ + public static Collection values() { + return values(NotebookParameterType.class); + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java new file mode 100644 index 000000000000..2e01cfa2ddc6 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/NotebookReferenceType.java @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** Defines values for NotebookReferenceType. */ +public final class NotebookReferenceType extends ExpandableStringEnum { + /** Static value NotebookReference for NotebookReferenceType. */ + public static final NotebookReferenceType NOTEBOOK_REFERENCE = fromString("NotebookReference"); + + /** + * Creates or finds a NotebookReferenceType from its string representation. + * + * @param name a name to look for. + * @return the corresponding NotebookReferenceType. 
+ */ + @JsonCreator + public static NotebookReferenceType fromString(String name) { + return fromString(name, NotebookReferenceType.class); + } + + /** + * Gets known NotebookReferenceType values. + * + * @return known NotebookReferenceType values. + */ + public static Collection values() { + return values(NotebookReferenceType.class); + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java new file mode 100644 index 000000000000..0fae1d1da696 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SparkJobReferenceType.java @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.util.ExpandableStringEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.util.Collection; + +/** Defines values for SparkJobReferenceType. */ +public final class SparkJobReferenceType extends ExpandableStringEnum { + /** Static value SparkJobDefinitionReference for SparkJobReferenceType. */ + public static final SparkJobReferenceType SPARK_JOB_DEFINITION_REFERENCE = + fromString("SparkJobDefinitionReference"); + + /** + * Creates or finds a SparkJobReferenceType from its string representation. + * + * @param name a name to look for. + * @return the corresponding SparkJobReferenceType. + */ + @JsonCreator + public static SparkJobReferenceType fromString(String name) { + return fromString(name, SparkJobReferenceType.class); + } + + /** + * Gets known SparkJobReferenceType values. + * + * @return known SparkJobReferenceType values. + */ + public static Collection values() { + return values(SparkJobReferenceType.class); + } +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java new file mode 100644 index 000000000000..c01370887f53 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookActivity.java @@ -0,0 +1,270 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.datafactory.fluent.models.SynapseNotebookActivityTypeProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import com.fasterxml.jackson.annotation.JsonTypeName; +import java.util.List; +import java.util.Map; + +/** Execute Synapse notebook activity. */ +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeName("SynapseNotebook") +@Fluent +public final class SynapseNotebookActivity extends ExecutionActivity { + /* + * Execute Synapse notebook activity properties. 
+ */ + @JsonProperty(value = "typeProperties", required = true) + private SynapseNotebookActivityTypeProperties innerTypeProperties = new SynapseNotebookActivityTypeProperties(); + + /** + * Get the innerTypeProperties property: Execute Synapse notebook activity properties. + * + * @return the innerTypeProperties value. + */ + private SynapseNotebookActivityTypeProperties innerTypeProperties() { + return this.innerTypeProperties; + } + + /** {@inheritDoc} */ + @Override + public SynapseNotebookActivity withLinkedServiceName(LinkedServiceReference linkedServiceName) { + super.withLinkedServiceName(linkedServiceName); + return this; + } + + /** {@inheritDoc} */ + @Override + public SynapseNotebookActivity withPolicy(ActivityPolicy policy) { + super.withPolicy(policy); + return this; + } + + /** {@inheritDoc} */ + @Override + public SynapseNotebookActivity withName(String name) { + super.withName(name); + return this; + } + + /** {@inheritDoc} */ + @Override + public SynapseNotebookActivity withDescription(String description) { + super.withDescription(description); + return this; + } + + /** {@inheritDoc} */ + @Override + public SynapseNotebookActivity withDependsOn(List dependsOn) { + super.withDependsOn(dependsOn); + return this; + } + + /** {@inheritDoc} */ + @Override + public SynapseNotebookActivity withUserProperties(List userProperties) { + super.withUserProperties(userProperties); + return this; + } + + /** + * Get the notebook property: Synapse notebook reference. + * + * @return the notebook value. + */ + public SynapseNotebookReference notebook() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().notebook(); + } + + /** + * Set the notebook property: Synapse notebook reference. + * + * @param notebook the notebook value to set. + * @return the SynapseNotebookActivity object itself. + */ + public SynapseNotebookActivity withNotebook(SynapseNotebookReference notebook) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseNotebookActivityTypeProperties(); + } + this.innerTypeProperties().withNotebook(notebook); + return this; + } + + /** + * Get the sparkPool property: The name of the big data pool which will be used to execute the notebook. + * + * @return the sparkPool value. + */ + public BigDataPoolParametrizationReference sparkPool() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().sparkPool(); + } + + /** + * Set the sparkPool property: The name of the big data pool which will be used to execute the notebook. + * + * @param sparkPool the sparkPool value to set. + * @return the SynapseNotebookActivity object itself. + */ + public SynapseNotebookActivity withSparkPool(BigDataPoolParametrizationReference sparkPool) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseNotebookActivityTypeProperties(); + } + this.innerTypeProperties().withSparkPool(sparkPool); + return this; + } + + /** + * Get the parameters property: Notebook parameters. + * + * @return the parameters value. + */ + public Map parameters() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().parameters(); + } + + /** + * Set the parameters property: Notebook parameters. + * + * @param parameters the parameters value to set. + * @return the SynapseNotebookActivity object itself. 
+     */
+    public SynapseNotebookActivity withParameters(Map<String, NotebookParameter> parameters) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseNotebookActivityTypeProperties();
+        }
+        this.innerTypeProperties().withParameters(parameters);
+        return this;
+    }
+
+    /**
+     * Get the executorSize property: Number of core and memory to be used for executors allocated in the specified
+     * Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the
+     * notebook you provide. Type: string (or Expression with resultType string).
+     *
+     * @return the executorSize value.
+     */
+    public Object executorSize() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().executorSize();
+    }
+
+    /**
+     * Set the executorSize property: Number of core and memory to be used for executors allocated in the specified
+     * Spark pool for the session, which will be used for overriding 'executorCores' and 'executorMemory' of the
+     * notebook you provide. Type: string (or Expression with resultType string).
+     *
+     * @param executorSize the executorSize value to set.
+     * @return the SynapseNotebookActivity object itself.
+     */
+    public SynapseNotebookActivity withExecutorSize(Object executorSize) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseNotebookActivityTypeProperties();
+        }
+        this.innerTypeProperties().withExecutorSize(executorSize);
+        return this;
+    }
+
+    /**
+     * Get the conf property: Spark configuration properties, which will override the 'conf' of the notebook you
+     * provide.
+     *
+     * @return the conf value.
+     */
+    public Object conf() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().conf();
+    }
+
+    /**
+     * Set the conf property: Spark configuration properties, which will override the 'conf' of the notebook you
+     * provide.
+     *
+     * @param conf the conf value to set.
+     * @return the SynapseNotebookActivity object itself.
+     */
+    public SynapseNotebookActivity withConf(Object conf) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseNotebookActivityTypeProperties();
+        }
+        this.innerTypeProperties().withConf(conf);
+        return this;
+    }
+
+    /**
+     * Get the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark
+     * pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you
+     * provide. Type: string (or Expression with resultType string).
+     *
+     * @return the driverSize value.
+     */
+    public Object driverSize() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().driverSize();
+    }
+
+    /**
+     * Set the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark
+     * pool for the session, which will be used for overriding 'driverCores' and 'driverMemory' of the notebook you
+     * provide. Type: string (or Expression with resultType string).
+     *
+     * @param driverSize the driverSize value to set.
+     * @return the SynapseNotebookActivity object itself.
+     */
+    public SynapseNotebookActivity withDriverSize(Object driverSize) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseNotebookActivityTypeProperties();
+        }
+        this.innerTypeProperties().withDriverSize(driverSize);
+        return this;
+    }
+
+    /**
+     * Get the numExecutors property: Number of executors to launch for this session, which will override the
+     * 'numExecutors' of the notebook you provide.
+ * + * @return the numExecutors value. + */ + public Integer numExecutors() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().numExecutors(); + } + + /** + * Set the numExecutors property: Number of executors to launch for this session, which will override the + * 'numExecutors' of the notebook you provide. + * + * @param numExecutors the numExecutors value to set. + * @return the SynapseNotebookActivity object itself. + */ + public SynapseNotebookActivity withNumExecutors(Integer numExecutors) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseNotebookActivityTypeProperties(); + } + this.innerTypeProperties().withNumExecutors(numExecutors); + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + super.validate(); + if (innerTypeProperties() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property innerTypeProperties in model SynapseNotebookActivity")); + } else { + innerTypeProperties().validate(); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(SynapseNotebookActivity.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java new file mode 100644 index 000000000000..cba07b178bbe --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseNotebookReference.java @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Synapse notebook reference type. */ +@Fluent +public final class SynapseNotebookReference { + /* + * Synapse notebook reference type. + */ + @JsonProperty(value = "type", required = true) + private NotebookReferenceType type; + + /* + * Reference notebook name. Type: string (or Expression with resultType + * string). + */ + @JsonProperty(value = "referenceName", required = true) + private Object referenceName; + + /** + * Get the type property: Synapse notebook reference type. + * + * @return the type value. + */ + public NotebookReferenceType type() { + return this.type; + } + + /** + * Set the type property: Synapse notebook reference type. + * + * @param type the type value to set. + * @return the SynapseNotebookReference object itself. + */ + public SynapseNotebookReference withType(NotebookReferenceType type) { + this.type = type; + return this; + } + + /** + * Get the referenceName property: Reference notebook name. Type: string (or Expression with resultType string). + * + * @return the referenceName value. + */ + public Object referenceName() { + return this.referenceName; + } + + /** + * Set the referenceName property: Reference notebook name. Type: string (or Expression with resultType string). + * + * @param referenceName the referenceName value to set. + * @return the SynapseNotebookReference object itself. 
+ */ + public SynapseNotebookReference withReferenceName(Object referenceName) { + this.referenceName = referenceName; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + if (type() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException("Missing required property type in model SynapseNotebookReference")); + } + if (referenceName() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property referenceName in model SynapseNotebookReference")); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(SynapseNotebookReference.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java new file mode 100644 index 000000000000..f22c0a846bb4 --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobDefinitionActivity.java @@ -0,0 +1,349 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.datafactory.fluent.models.SynapseSparkJobActivityTypeProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import com.fasterxml.jackson.annotation.JsonTypeName; +import java.util.List; + +/** Execute spark job activity. */ +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeName("SparkJob") +@Fluent +public final class SynapseSparkJobDefinitionActivity extends ExecutionActivity { + /* + * Execute spark job activity properties. + */ + @JsonProperty(value = "typeProperties", required = true) + private SynapseSparkJobActivityTypeProperties innerTypeProperties = new SynapseSparkJobActivityTypeProperties(); + + /** + * Get the innerTypeProperties property: Execute spark job activity properties. + * + * @return the innerTypeProperties value. 
+     */
+    private SynapseSparkJobActivityTypeProperties innerTypeProperties() {
+        return this.innerTypeProperties;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public SynapseSparkJobDefinitionActivity withLinkedServiceName(LinkedServiceReference linkedServiceName) {
+        super.withLinkedServiceName(linkedServiceName);
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public SynapseSparkJobDefinitionActivity withPolicy(ActivityPolicy policy) {
+        super.withPolicy(policy);
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public SynapseSparkJobDefinitionActivity withName(String name) {
+        super.withName(name);
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public SynapseSparkJobDefinitionActivity withDescription(String description) {
+        super.withDescription(description);
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public SynapseSparkJobDefinitionActivity withDependsOn(List<ActivityDependency> dependsOn) {
+        super.withDependsOn(dependsOn);
+        return this;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public SynapseSparkJobDefinitionActivity withUserProperties(List<UserProperty> userProperties) {
+        super.withUserProperties(userProperties);
+        return this;
+    }
+
+    /**
+     * Get the sparkJob property: Synapse spark job reference.
+     *
+     * @return the sparkJob value.
+     */
+    public SynapseSparkJobReference sparkJob() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().sparkJob();
+    }
+
+    /**
+     * Set the sparkJob property: Synapse spark job reference.
+     *
+     * @param sparkJob the sparkJob value to set.
+     * @return the SynapseSparkJobDefinitionActivity object itself.
+     */
+    public SynapseSparkJobDefinitionActivity withSparkJob(SynapseSparkJobReference sparkJob) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+        }
+        this.innerTypeProperties().withSparkJob(sparkJob);
+        return this;
+    }
+
+    /**
+     * Get the arguments property: User specified arguments to SynapseSparkJobDefinitionActivity.
+     *
+     * @return the arguments value.
+     */
+    public List<Object> arguments() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().arguments();
+    }
+
+    /**
+     * Set the arguments property: User specified arguments to SynapseSparkJobDefinitionActivity.
+     *
+     * @param arguments the arguments value to set.
+     * @return the SynapseSparkJobDefinitionActivity object itself.
+     */
+    public SynapseSparkJobDefinitionActivity withArguments(List<Object> arguments) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+        }
+        this.innerTypeProperties().withArguments(arguments);
+        return this;
+    }
+
+    /**
+     * Get the file property: The main file used for the job, which will override the 'file' of the spark job definition
+     * you provide. Type: string (or Expression with resultType string).
+     *
+     * @return the file value.
+     */
+    public Object file() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().file();
+    }
+
+    /**
+     * Set the file property: The main file used for the job, which will override the 'file' of the spark job definition
+     * you provide. Type: string (or Expression with resultType string).
+     *
+     * @param file the file value to set.
+     * @return the SynapseSparkJobDefinitionActivity object itself.
+     */
+    public SynapseSparkJobDefinitionActivity withFile(Object file) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+        }
+        this.innerTypeProperties().withFile(file);
+        return this;
+    }
+
+    /**
+     * Get the className property: The fully-qualified identifier or the main class that is in the main definition file,
+     * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with
+     * resultType string).
+     *
+     * @return the className value.
+     */
+    public Object className() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().className();
+    }
+
+    /**
+     * Set the className property: The fully-qualified identifier or the main class that is in the main definition file,
+     * which will override the 'className' of the spark job definition you provide. Type: string (or Expression with
+     * resultType string).
+     *
+     * @param className the className value to set.
+     * @return the SynapseSparkJobDefinitionActivity object itself.
+     */
+    public SynapseSparkJobDefinitionActivity withClassName(Object className) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+        }
+        this.innerTypeProperties().withClassName(className);
+        return this;
+    }
+
+    /**
+     * Get the files property: Additional files used for reference in the main definition file, which will override the
+     * 'files' of the spark job definition you provide.
+     *
+     * @return the files value.
+     */
+    public List<Object> files() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().files();
+    }
+
+    /**
+     * Set the files property: Additional files used for reference in the main definition file, which will override the
+     * 'files' of the spark job definition you provide.
+     *
+     * @param files the files value to set.
+     * @return the SynapseSparkJobDefinitionActivity object itself.
+     */
+    public SynapseSparkJobDefinitionActivity withFiles(List<Object> files) {
+        if (this.innerTypeProperties() == null) {
+            this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties();
+        }
+        this.innerTypeProperties().withFiles(files);
+        return this;
+    }
+
+    /**
+     * Get the targetBigDataPool property: The name of the big data pool which will be used to execute the spark batch
+     * job, which will override the 'targetBigDataPool' of the spark job definition you provide.
+     *
+     * @return the targetBigDataPool value.
+     */
+    public BigDataPoolParametrizationReference targetBigDataPool() {
+        return this.innerTypeProperties() == null ? null : this.innerTypeProperties().targetBigDataPool();
+    }
+
+    /**
+     * Set the targetBigDataPool property: The name of the big data pool which will be used to execute the spark batch
+     * job, which will override the 'targetBigDataPool' of the spark job definition you provide.
+     *
+     * @param targetBigDataPool the targetBigDataPool value to set.
+     * @return the SynapseSparkJobDefinitionActivity object itself.
+ */ + public SynapseSparkJobDefinitionActivity withTargetBigDataPool( + BigDataPoolParametrizationReference targetBigDataPool) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties(); + } + this.innerTypeProperties().withTargetBigDataPool(targetBigDataPool); + return this; + } + + /** + * Get the executorSize property: Number of core and memory to be used for executors allocated in the specified + * Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job + * definition you provide. Type: string (or Expression with resultType string). + * + * @return the executorSize value. + */ + public Object executorSize() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().executorSize(); + } + + /** + * Set the executorSize property: Number of core and memory to be used for executors allocated in the specified + * Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job + * definition you provide. Type: string (or Expression with resultType string). + * + * @param executorSize the executorSize value to set. + * @return the SynapseSparkJobDefinitionActivity object itself. + */ + public SynapseSparkJobDefinitionActivity withExecutorSize(Object executorSize) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties(); + } + this.innerTypeProperties().withExecutorSize(executorSize); + return this; + } + + /** + * Get the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. + * + * @return the conf value. + */ + public Object conf() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().conf(); + } + + /** + * Set the conf property: Spark configuration properties, which will override the 'conf' of the spark job definition + * you provide. + * + * @param conf the conf value to set. + * @return the SynapseSparkJobDefinitionActivity object itself. + */ + public SynapseSparkJobDefinitionActivity withConf(Object conf) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties(); + } + this.innerTypeProperties().withConf(conf); + return this; + } + + /** + * Get the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark + * pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition + * you provide. Type: string (or Expression with resultType string). + * + * @return the driverSize value. + */ + public Object driverSize() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().driverSize(); + } + + /** + * Set the driverSize property: Number of core and memory to be used for driver allocated in the specified Spark + * pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition + * you provide. Type: string (or Expression with resultType string). + * + * @param driverSize the driverSize value to set. + * @return the SynapseSparkJobDefinitionActivity object itself. 
+ */ + public SynapseSparkJobDefinitionActivity withDriverSize(Object driverSize) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties(); + } + this.innerTypeProperties().withDriverSize(driverSize); + return this; + } + + /** + * Get the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. + * + * @return the numExecutors value. + */ + public Integer numExecutors() { + return this.innerTypeProperties() == null ? null : this.innerTypeProperties().numExecutors(); + } + + /** + * Set the numExecutors property: Number of executors to launch for this job, which will override the 'numExecutors' + * of the spark job definition you provide. + * + * @param numExecutors the numExecutors value to set. + * @return the SynapseSparkJobDefinitionActivity object itself. + */ + public SynapseSparkJobDefinitionActivity withNumExecutors(Integer numExecutors) { + if (this.innerTypeProperties() == null) { + this.innerTypeProperties = new SynapseSparkJobActivityTypeProperties(); + } + this.innerTypeProperties().withNumExecutors(numExecutors); + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + super.validate(); + if (innerTypeProperties() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property innerTypeProperties in model SynapseSparkJobDefinitionActivity")); + } else { + innerTypeProperties().validate(); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobDefinitionActivity.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java new file mode 100644 index 000000000000..420809a51a6f --- /dev/null +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/SynapseSparkJobReference.java @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.datafactory.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.util.logging.ClientLogger; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Synapse spark job reference type. */ +@Fluent +public final class SynapseSparkJobReference { + /* + * Synapse spark job reference type. + */ + @JsonProperty(value = "type", required = true) + private SparkJobReferenceType type; + + /* + * Reference spark job name. + */ + @JsonProperty(value = "referenceName", required = true) + private String referenceName; + + /** + * Get the type property: Synapse spark job reference type. + * + * @return the type value. + */ + public SparkJobReferenceType type() { + return this.type; + } + + /** + * Set the type property: Synapse spark job reference type. + * + * @param type the type value to set. + * @return the SynapseSparkJobReference object itself. + */ + public SynapseSparkJobReference withType(SparkJobReferenceType type) { + this.type = type; + return this; + } + + /** + * Get the referenceName property: Reference spark job name. 
+ * + * @return the referenceName value. + */ + public String referenceName() { + return this.referenceName; + } + + /** + * Set the referenceName property: Reference spark job name. + * + * @param referenceName the referenceName value to set. + * @return the SynapseSparkJobReference object itself. + */ + public SynapseSparkJobReference withReferenceName(String referenceName) { + this.referenceName = referenceName; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + if (type() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException("Missing required property type in model SynapseSparkJobReference")); + } + if (referenceName() == null) { + throw LOGGER + .logExceptionAsError( + new IllegalArgumentException( + "Missing required property referenceName in model SynapseSparkJobReference")); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(SynapseSparkJobReference.class); +} diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java index f50820ca9f15..daed1dc1c82a 100644 --- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java +++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/models/WebLinkedServiceTypeProperties.java @@ -29,14 +29,14 @@ @Fluent public class WebLinkedServiceTypeProperties { /* - * The URL of the web service endpoint, e.g. http://www.microsoft.com . + * The URL of the web service endpoint, e.g. https://www.microsoft.com . * Type: string (or Expression with resultType string). */ @JsonProperty(value = "url", required = true) private Object url; /** - * Get the url property: The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or + * Get the url property: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string (or * Expression with resultType string). * * @return the url value. @@ -46,7 +46,7 @@ public Object url() { } /** - * Set the url property: The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or + * Set the url property: The URL of the web service endpoint, e.g. https://www.microsoft.com . Type: string (or * Expression with resultType string). * * @param url the url value to set.
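Editor's note: for orientation, below is a minimal, illustrative sketch (not part of the generated patch) of how the new Synapse activity models introduced by this change might be composed with their fluent with* setters. The notebook, Spark job definition, pool, and parameter names are placeholders, and the enum constants NOTEBOOK_REFERENCE and BIG_DATA_POOL_REFERENCE are assumed to follow the generator's usual naming for the NotebookReferenceType and BigDataPoolReferenceType values added elsewhere in this patch.

import com.azure.resourcemanager.datafactory.models.BigDataPoolParametrizationReference;
import com.azure.resourcemanager.datafactory.models.BigDataPoolReferenceType;
import com.azure.resourcemanager.datafactory.models.NotebookParameter;
import com.azure.resourcemanager.datafactory.models.NotebookParameterType;
import com.azure.resourcemanager.datafactory.models.NotebookReferenceType;
import com.azure.resourcemanager.datafactory.models.SparkJobReferenceType;
import com.azure.resourcemanager.datafactory.models.SynapseNotebookActivity;
import com.azure.resourcemanager.datafactory.models.SynapseNotebookReference;
import com.azure.resourcemanager.datafactory.models.SynapseSparkJobDefinitionActivity;
import com.azure.resourcemanager.datafactory.models.SynapseSparkJobReference;
import java.util.Collections;
import java.util.Map;

public final class SynapseActivitySketch {
    public static void main(String[] args) {
        // Hypothetical notebook parameter passed to the Synapse notebook at run time.
        Map<String, NotebookParameter> parameters =
            Collections.singletonMap(
                "inputPath",
                new NotebookParameter().withType(NotebookParameterType.STRING).withValue("abfss://data/in"));

        // Execute a Synapse notebook on a placeholder Spark pool, overriding the executor count.
        SynapseNotebookActivity notebookActivity =
            new SynapseNotebookActivity()
                .withName("RunSampleNotebook")
                .withNotebook(
                    new SynapseNotebookReference()
                        .withType(NotebookReferenceType.NOTEBOOK_REFERENCE) // assumed constant name
                        .withReferenceName("SampleNotebook"))
                .withSparkPool(
                    new BigDataPoolParametrizationReference()
                        .withType(BigDataPoolReferenceType.BIG_DATA_POOL_REFERENCE) // assumed constant name
                        .withReferenceName("samplepool"))
                .withParameters(parameters)
                .withNumExecutors(2);

        // Execute a Synapse Spark job definition by reference.
        SynapseSparkJobDefinitionActivity sparkJobActivity =
            new SynapseSparkJobDefinitionActivity()
                .withName("RunSampleSparkJob")
                .withSparkJob(
                    new SynapseSparkJobReference()
                        .withType(SparkJobReferenceType.SPARK_JOB_DEFINITION_REFERENCE)
                        .withReferenceName("SampleSparkJob"))
                .withNumExecutors(4);

        // Both objects are ExecutionActivity instances, so they would typically be supplied in a
        // pipeline's activity list when defining or updating a PipelineResource with this SDK.
    }
}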